{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.31569965870307165,
"acc_stderr": 0.013582571095815291,
"acc_norm": 0.378839590443686,
"acc_norm_stderr": 0.01417591549000032
},
"harness|ko_hellaswag|10": {
"acc": 0.36128261302529374,
"acc_stderr": 0.004793904922401889,
"acc_norm": 0.46106353316072496,
"acc_norm_stderr": 0.004974628903829141
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.39766081871345027,
"acc_stderr": 0.0375363895576169,
"acc_norm": 0.39766081871345027,
"acc_norm_stderr": 0.0375363895576169
},
"harness|ko_mmlu_management|5": {
"acc": 0.39805825242718446,
"acc_stderr": 0.0484674825397724,
"acc_norm": 0.39805825242718446,
"acc_norm_stderr": 0.0484674825397724
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4942528735632184,
"acc_stderr": 0.017878782326129234,
"acc_norm": 0.4942528735632184,
"acc_norm_stderr": 0.017878782326129234
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4,
"acc_stderr": 0.042320736951515885,
"acc_norm": 0.4,
"acc_norm_stderr": 0.042320736951515885
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4,
"acc_stderr": 0.03202563076101737,
"acc_norm": 0.4,
"acc_norm_stderr": 0.03202563076101737
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3433734939759036,
"acc_stderr": 0.03696584317010601,
"acc_norm": 0.3433734939759036,
"acc_norm_stderr": 0.03696584317010601
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4565916398713826,
"acc_stderr": 0.028290869054197598,
"acc_norm": 0.4565916398713826,
"acc_norm_stderr": 0.028290869054197598
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3811659192825112,
"acc_stderr": 0.03259625118416827,
"acc_norm": 0.3811659192825112,
"acc_norm_stderr": 0.03259625118416827
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4351145038167939,
"acc_stderr": 0.043482080516448585,
"acc_norm": 0.4351145038167939,
"acc_norm_stderr": 0.043482080516448585
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.03540294377095368,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.03540294377095368
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4206896551724138,
"acc_stderr": 0.0411391498118926,
"acc_norm": 0.4206896551724138,
"acc_norm_stderr": 0.0411391498118926
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.041583075330832865,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3739495798319328,
"acc_stderr": 0.031429466378837076,
"acc_norm": 0.3739495798319328,
"acc_norm_stderr": 0.031429466378837076
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.358974358974359,
"acc_stderr": 0.02432173848460237,
"acc_norm": 0.358974358974359,
"acc_norm_stderr": 0.02432173848460237
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.04830366024635331,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.04830366024635331
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3891625615763547,
"acc_stderr": 0.034304624161038716,
"acc_norm": 0.3891625615763547,
"acc_norm_stderr": 0.034304624161038716
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.45161290322580644,
"acc_stderr": 0.028310500348568385,
"acc_norm": 0.45161290322580644,
"acc_norm_stderr": 0.028310500348568385
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.03255326307272487,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.03255326307272487
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.37735849056603776,
"acc_stderr": 0.029832808114796005,
"acc_norm": 0.37735849056603776,
"acc_norm_stderr": 0.029832808114796005
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4636363636363636,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.4636363636363636,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.24814814814814815,
"acc_stderr": 0.0263357394040558,
"acc_norm": 0.24814814814814815,
"acc_norm_stderr": 0.0263357394040558
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.23841059602649006,
"acc_stderr": 0.03479185572599661,
"acc_norm": 0.23841059602649006,
"acc_norm_stderr": 0.03479185572599661
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.527363184079602,
"acc_stderr": 0.03530235517334682,
"acc_norm": 0.527363184079602,
"acc_norm_stderr": 0.03530235517334682
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.31213872832369943,
"acc_stderr": 0.035331333893236574,
"acc_norm": 0.31213872832369943,
"acc_norm_stderr": 0.035331333893236574
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.022569897074918417,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.022569897074918417
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3194444444444444,
"acc_stderr": 0.03899073687357336,
"acc_norm": 0.3194444444444444,
"acc_norm_stderr": 0.03899073687357336
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4161849710982659,
"acc_stderr": 0.026538189104705488,
"acc_norm": 0.4161849710982659,
"acc_norm_stderr": 0.026538189104705488
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4049079754601227,
"acc_stderr": 0.03856672163548913,
"acc_norm": 0.4049079754601227,
"acc_norm_stderr": 0.03856672163548913
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4166666666666667,
"acc_stderr": 0.027431623722415012,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.027431623722415012
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.47150259067357514,
"acc_stderr": 0.036025735712884414,
"acc_norm": 0.47150259067357514,
"acc_norm_stderr": 0.036025735712884414
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.32456140350877194,
"acc_stderr": 0.044045561573747685,
"acc_norm": 0.32456140350877194,
"acc_norm_stderr": 0.044045561573747685
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.41467889908256883,
"acc_stderr": 0.021122903208602595,
"acc_norm": 0.41467889908256883,
"acc_norm_stderr": 0.021122903208602595
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235172,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235172
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.02818059632825929,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.02818059632825929
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6198347107438017,
"acc_stderr": 0.04431324501968432,
"acc_norm": 0.6198347107438017,
"acc_norm_stderr": 0.04431324501968432
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.47368421052631576,
"acc_stderr": 0.04063302731486671,
"acc_norm": 0.47368421052631576,
"acc_norm_stderr": 0.04063302731486671
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3431372549019608,
"acc_stderr": 0.019206606848825365,
"acc_norm": 0.3431372549019608,
"acc_norm_stderr": 0.019206606848825365
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3049645390070922,
"acc_stderr": 0.027464708442022128,
"acc_norm": 0.3049645390070922,
"acc_norm_stderr": 0.027464708442022128
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952688,
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.04059867246952688
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.0305467452649532,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.0305467452649532
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.25735294117647056,
"acc_stderr": 0.02655651947004153,
"acc_norm": 0.25735294117647056,
"acc_norm_stderr": 0.02655651947004153
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3183673469387755,
"acc_stderr": 0.029822533793982052,
"acc_norm": 0.3183673469387755,
"acc_norm_stderr": 0.029822533793982052
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.4472573839662447,
"acc_stderr": 0.03236564251614192,
"acc_norm": 0.4472573839662447,
"acc_norm_stderr": 0.03236564251614192
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2926988265971317,
"acc_stderr": 0.01162094919584953,
"acc_norm": 0.2926988265971317,
"acc_norm_stderr": 0.01162094919584953
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.37254901960784315,
"acc_stderr": 0.03393388584958404,
"acc_norm": 0.37254901960784315,
"acc_norm_stderr": 0.03393388584958404
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.40606060606060607,
"acc_stderr": 0.03834816355401181,
"acc_norm": 0.40606060606060607,
"acc_norm_stderr": 0.03834816355401181
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.26193390452876375,
"mc1_stderr": 0.015392118805015006,
"mc2": 0.4252802014875463,
"mc2_stderr": 0.015159718417104805
},
"harness|ko_commongen_v2|2": {
"acc": 0.4179456906729634,
"acc_stderr": 0.01695729200527972,
"acc_norm": 0.5324675324675324,
"acc_norm_stderr": 0.017154073716682865
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIdenU/LLAMA-2-13b-ko-Y24_v2.0",
"model_sha": "f58dd2241e16a20ea477d50451305dcfa336c881",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
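
A minimal sketch of how one might consume this results file, assuming only Python's standard json and statistics modules; the filename and the acc_norm-first aggregation below are illustrative choices, not the leaderboard's official scoring procedure.

import json
from statistics import mean

# Load the results file produced by the evaluation harness
# (the path here is illustrative).
with open("result_2024-01-24 23:59:55.json", encoding="utf-8") as f:
    data = json.load(f)

# For each task, prefer acc_norm, fall back to acc, and for the
# TruthfulQA entry (which reports mc1/mc2 instead) fall back to mc2.
scores = []
for task, metrics in data["results"].items():
    value = metrics.get("acc_norm", metrics.get("acc", metrics.get("mc2")))
    if value is not None:
        scores.append(value)

# Print an unweighted mean over all per-task scores for a quick summary.
print(f"{data['config_general']['model_name']}: "
      f"mean over {len(scores)} tasks = {mean(scores):.4f}")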