{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3839590443686007,
"acc_stderr": 0.01421244498065189,
"acc_norm": 0.4522184300341297,
"acc_norm_stderr": 0.014544519880633832
},
"harness|ko_hellaswag|10": {
"acc": 0.4166500697072296,
"acc_stderr": 0.00491996282220832,
"acc_norm": 0.5524795857398924,
"acc_norm_stderr": 0.004962220512548352
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5614035087719298,
"acc_stderr": 0.038057975055904594,
"acc_norm": 0.5614035087719298,
"acc_norm_stderr": 0.038057975055904594
},
"harness|ko_mmlu_management|5": {
"acc": 0.49514563106796117,
"acc_stderr": 0.049505043821289195,
"acc_norm": 0.49514563106796117,
"acc_norm_stderr": 0.049505043821289195
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5351213282247765,
"acc_stderr": 0.017835798806290642,
"acc_norm": 0.5351213282247765,
"acc_norm_stderr": 0.017835798806290642
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.041153246103369526
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.31063829787234043,
"acc_stderr": 0.03025123757921317,
"acc_norm": 0.31063829787234043,
"acc_norm_stderr": 0.03025123757921317
},
"harness|ko_mmlu_virology|5": {
"acc": 0.42168674698795183,
"acc_stderr": 0.038444531817709175,
"acc_norm": 0.42168674698795183,
"acc_norm_stderr": 0.038444531817709175
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5048231511254019,
"acc_stderr": 0.02839677044411129,
"acc_norm": 0.5048231511254019,
"acc_norm_stderr": 0.02839677044411129
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4484304932735426,
"acc_stderr": 0.03337883736255099,
"acc_norm": 0.4484304932735426,
"acc_norm_stderr": 0.03337883736255099
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5267175572519084,
"acc_stderr": 0.04379024936553894,
"acc_norm": 0.5267175572519084,
"acc_norm_stderr": 0.04379024936553894
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.43,
"acc_stderr": 0.04975698519562429,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562429
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.494949494949495,
"acc_stderr": 0.035621707606254015,
"acc_norm": 0.494949494949495,
"acc_norm_stderr": 0.035621707606254015
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4068965517241379,
"acc_stderr": 0.04093793981266236,
"acc_norm": 0.4068965517241379,
"acc_norm_stderr": 0.04093793981266236
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.17647058823529413,
"acc_stderr": 0.0379328118530781,
"acc_norm": 0.17647058823529413,
"acc_norm_stderr": 0.0379328118530781
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4957983193277311,
"acc_stderr": 0.03247734334448111,
"acc_norm": 0.4957983193277311,
"acc_norm_stderr": 0.03247734334448111
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4230769230769231,
"acc_stderr": 0.025049197876042328,
"acc_norm": 0.4230769230769231,
"acc_norm_stderr": 0.025049197876042328
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.04820403072760628,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.04820403072760628
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.35960591133004927,
"acc_stderr": 0.03376458246509568,
"acc_norm": 0.35960591133004927,
"acc_norm_stderr": 0.03376458246509568
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.43548387096774194,
"acc_stderr": 0.02820622559150275,
"acc_norm": 0.43548387096774194,
"acc_norm_stderr": 0.02820622559150275
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6581196581196581,
"acc_stderr": 0.03107502852650775,
"acc_norm": 0.6581196581196581,
"acc_norm_stderr": 0.03107502852650775
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4490566037735849,
"acc_stderr": 0.030612730713641095,
"acc_norm": 0.4490566037735849,
"acc_norm_stderr": 0.030612730713641095
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.41818181818181815,
"acc_stderr": 0.04724577405731572,
"acc_norm": 0.41818181818181815,
"acc_norm_stderr": 0.04724577405731572
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.23333333333333334,
"acc_stderr": 0.02578787422095932,
"acc_norm": 0.23333333333333334,
"acc_norm_stderr": 0.02578787422095932
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2052980132450331,
"acc_stderr": 0.032979866484738336,
"acc_norm": 0.2052980132450331,
"acc_norm_stderr": 0.032979866484738336
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5771144278606966,
"acc_stderr": 0.034932317774212816,
"acc_norm": 0.5771144278606966,
"acc_norm_stderr": 0.034932317774212816
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3699421965317919,
"acc_stderr": 0.036812296333943194,
"acc_norm": 0.3699421965317919,
"acc_norm_stderr": 0.036812296333943194
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2830687830687831,
"acc_stderr": 0.023201392938194978,
"acc_norm": 0.2830687830687831,
"acc_norm_stderr": 0.023201392938194978
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4166666666666667,
"acc_stderr": 0.04122728707651282,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.04122728707651282
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695237,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695237
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5057803468208093,
"acc_stderr": 0.02691729617914911,
"acc_norm": 0.5057803468208093,
"acc_norm_stderr": 0.02691729617914911
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.44785276073619634,
"acc_stderr": 0.03906947479456602,
"acc_norm": 0.44785276073619634,
"acc_norm_stderr": 0.03906947479456602
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.027777777777777797,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.027777777777777797
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5647668393782384,
"acc_stderr": 0.03578038165008586,
"acc_norm": 0.5647668393782384,
"acc_norm_stderr": 0.03578038165008586
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03999423879281335,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03999423879281335
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5522935779816514,
"acc_stderr": 0.02131975496242546,
"acc_norm": 0.5522935779816514,
"acc_norm_stderr": 0.02131975496242546
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.040061680838488774,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.040061680838488774
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.45751633986928103,
"acc_stderr": 0.02852638345214264,
"acc_norm": 0.45751633986928103,
"acc_norm_stderr": 0.02852638345214264
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6859504132231405,
"acc_stderr": 0.04236964753041018,
"acc_norm": 0.6859504132231405,
"acc_norm_stderr": 0.04236964753041018
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.45394736842105265,
"acc_stderr": 0.04051646342874141,
"acc_norm": 0.45394736842105265,
"acc_norm_stderr": 0.04051646342874141
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3839869281045752,
"acc_stderr": 0.019675808135281525,
"acc_norm": 0.3839869281045752,
"acc_norm_stderr": 0.019675808135281525
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.35815602836879434,
"acc_stderr": 0.02860208586275942,
"acc_norm": 0.35815602836879434,
"acc_norm_stderr": 0.02860208586275942
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.042878587513404544,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.042878587513404544
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.25462962962962965,
"acc_stderr": 0.02971127586000534,
"acc_norm": 0.25462962962962965,
"acc_norm_stderr": 0.02971127586000534
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.293854748603352,
"acc_stderr": 0.015235075776719616,
"acc_norm": 0.293854748603352,
"acc_norm_stderr": 0.015235075776719616
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.27941176470588236,
"acc_stderr": 0.02725720260611495,
"acc_norm": 0.27941176470588236,
"acc_norm_stderr": 0.02725720260611495
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4775510204081633,
"acc_stderr": 0.031976941187136725,
"acc_norm": 0.4775510204081633,
"acc_norm_stderr": 0.031976941187136725
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6075949367088608,
"acc_stderr": 0.0317847187456473,
"acc_norm": 0.6075949367088608,
"acc_norm_stderr": 0.0317847187456473
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.33116036505867014,
"acc_stderr": 0.01202012819598576,
"acc_norm": 0.33116036505867014,
"acc_norm_stderr": 0.01202012819598576
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.49019607843137253,
"acc_stderr": 0.03508637358630572,
"acc_norm": 0.49019607843137253,
"acc_norm_stderr": 0.03508637358630572
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5454545454545454,
"acc_stderr": 0.038881769216741004,
"acc_norm": 0.5454545454545454,
"acc_norm_stderr": 0.038881769216741004
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.27539779681762544,
"mc1_stderr": 0.015638135667775523,
"mc2": 0.4478444454695957,
"mc2_stderr": 0.015296142940086415
},
"harness|ko_commongen_v2|2": {
"acc": 0.5454545454545454,
"acc_stderr": 0.017119172208061504,
"acc_norm": 0.5938606847697757,
"acc_norm_stderr": 0.016884749503191396
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "42MARU/GenAI-llama2-ko-en-platypus-13B",
"model_sha": "61d276d0715184790bae2979744f1ae7c0f451c0",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}