{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3856655290102389,
"acc_stderr": 0.014224250973257182,
"acc_norm": 0.44368600682593856,
"acc_norm_stderr": 0.014518421825670447
},
"harness|ko_hellaswag|10": {
"acc": 0.4207329217287393,
"acc_stderr": 0.004926678108601343,
"acc_norm": 0.5697072296355308,
"acc_norm_stderr": 0.004941051795214797
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.038342347441649924,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.038342347441649924
},
"harness|ko_mmlu_management|5": {
"acc": 0.49514563106796117,
"acc_stderr": 0.04950504382128921,
"acc_norm": 0.49514563106796117,
"acc_norm_stderr": 0.04950504382128921
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.541507024265645,
"acc_stderr": 0.017818248603465578,
"acc_norm": 0.541507024265645,
"acc_norm_stderr": 0.017818248603465578
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4888888888888889,
"acc_stderr": 0.04318275491977978,
"acc_norm": 0.4888888888888889,
"acc_norm_stderr": 0.04318275491977978
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206824,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206824
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.41702127659574467,
"acc_stderr": 0.03223276266711712,
"acc_norm": 0.41702127659574467,
"acc_norm_stderr": 0.03223276266711712
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4759036144578313,
"acc_stderr": 0.03887971849597264,
"acc_norm": 0.4759036144578313,
"acc_norm_stderr": 0.03887971849597264
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.49517684887459806,
"acc_stderr": 0.02839677044411129,
"acc_norm": 0.49517684887459806,
"acc_norm_stderr": 0.02839677044411129
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5112107623318386,
"acc_stderr": 0.033549366530984746,
"acc_norm": 0.5112107623318386,
"acc_norm_stderr": 0.033549366530984746
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.46564885496183206,
"acc_stderr": 0.04374928560599738,
"acc_norm": 0.46564885496183206,
"acc_norm_stderr": 0.04374928560599738
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5757575757575758,
"acc_stderr": 0.03521224908841586,
"acc_norm": 0.5757575757575758,
"acc_norm_stderr": 0.03521224908841586
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4,
"acc_stderr": 0.040824829046386284,
"acc_norm": 0.4,
"acc_norm_stderr": 0.040824829046386284
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.041583075330832865,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4789915966386555,
"acc_stderr": 0.03244980849990029,
"acc_norm": 0.4789915966386555,
"acc_norm_stderr": 0.03244980849990029
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.43333333333333335,
"acc_stderr": 0.02512465352588513,
"acc_norm": 0.43333333333333335,
"acc_norm_stderr": 0.02512465352588513
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.04830366024635331,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.04830366024635331
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4236453201970443,
"acc_stderr": 0.03476725747649037,
"acc_norm": 0.4236453201970443,
"acc_norm_stderr": 0.03476725747649037
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.47419354838709676,
"acc_stderr": 0.028406095057653315,
"acc_norm": 0.47419354838709676,
"acc_norm_stderr": 0.028406095057653315
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6367521367521367,
"acc_stderr": 0.03150712523091265,
"acc_norm": 0.6367521367521367,
"acc_norm_stderr": 0.03150712523091265
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4339622641509434,
"acc_stderr": 0.0305032920133426,
"acc_norm": 0.4339622641509434,
"acc_norm_stderr": 0.0305032920133426
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.509090909090909,
"acc_stderr": 0.04788339768702861,
"acc_norm": 0.509090909090909,
"acc_norm_stderr": 0.04788339768702861
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2518518518518518,
"acc_stderr": 0.026466117538959916,
"acc_norm": 0.2518518518518518,
"acc_norm_stderr": 0.026466117538959916
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.03802039760107903,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.03802039760107903
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.582089552238806,
"acc_stderr": 0.034875586404620636,
"acc_norm": 0.582089552238806,
"acc_norm_stderr": 0.034875586404620636
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.41040462427745666,
"acc_stderr": 0.03750757044895538,
"acc_norm": 0.41040462427745666,
"acc_norm_stderr": 0.03750757044895538
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.29365079365079366,
"acc_stderr": 0.023456037383982026,
"acc_norm": 0.29365079365079366,
"acc_norm_stderr": 0.023456037383982026
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3958333333333333,
"acc_stderr": 0.040894654493255835,
"acc_norm": 0.3958333333333333,
"acc_norm_stderr": 0.040894654493255835
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456344,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456344
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5057803468208093,
"acc_stderr": 0.026917296179149116,
"acc_norm": 0.5057803468208093,
"acc_norm_stderr": 0.026917296179149116
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.49079754601226994,
"acc_stderr": 0.03927705600787443,
"acc_norm": 0.49079754601226994,
"acc_norm_stderr": 0.03927705600787443
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.027801656212323667,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.027801656212323667
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252606,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252606
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5181347150259067,
"acc_stderr": 0.036060650018329185,
"acc_norm": 0.5181347150259067,
"acc_norm_stderr": 0.036060650018329185
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512321984,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512321984
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5541284403669725,
"acc_stderr": 0.021311335009708575,
"acc_norm": 0.5541284403669725,
"acc_norm_stderr": 0.021311335009708575
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.0416345303130286,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.0416345303130286
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.45098039215686275,
"acc_stderr": 0.028491993586171563,
"acc_norm": 0.45098039215686275,
"acc_norm_stderr": 0.028491993586171563
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6115702479338843,
"acc_stderr": 0.044492703500683836,
"acc_norm": 0.6115702479338843,
"acc_norm_stderr": 0.044492703500683836
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.40789473684210525,
"acc_stderr": 0.03999309712777472,
"acc_norm": 0.40789473684210525,
"acc_norm_stderr": 0.03999309712777472
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.38562091503267976,
"acc_stderr": 0.01969145905235415,
"acc_norm": 0.38562091503267976,
"acc_norm_stderr": 0.01969145905235415
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.29432624113475175,
"acc_stderr": 0.02718712701150381,
"acc_norm": 0.29432624113475175,
"acc_norm_stderr": 0.02718712701150381
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25892857142857145,
"acc_stderr": 0.041577515398656284,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.041577515398656284
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3425925925925926,
"acc_stderr": 0.032365852526021574,
"acc_norm": 0.3425925925925926,
"acc_norm_stderr": 0.032365852526021574
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.029520095697687758,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.029520095697687758
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.43673469387755104,
"acc_stderr": 0.031751952375833226,
"acc_norm": 0.43673469387755104,
"acc_norm_stderr": 0.031751952375833226
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6244725738396625,
"acc_stderr": 0.03152256243091156,
"acc_norm": 0.6244725738396625,
"acc_norm_stderr": 0.03152256243091156
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.33376792698826596,
"acc_stderr": 0.012043812655846147,
"acc_norm": 0.33376792698826596,
"acc_norm_stderr": 0.012043812655846147
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.03465868196380758,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.03465868196380758
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5393939393939394,
"acc_stderr": 0.03892207016552012,
"acc_norm": 0.5393939393939394,
"acc_norm_stderr": 0.03892207016552012
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2631578947368421,
"mc1_stderr": 0.015415241740237024,
"mc2": 0.417499174328329,
"mc2_stderr": 0.014766097200285613
},
"harness|ko_commongen_v2|2": {
"acc": 0.4793388429752066,
"acc_stderr": 0.01717567127983645,
"acc_norm": 0.5442739079102715,
"acc_norm_stderr": 0.017122829143292658
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Cartinoe5930/original-KoRAE-13b-3ep",
"model_sha": "6c109c149338c1aff8de13e82058abedb03b754d",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}