{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3455631399317406,
"acc_stderr": 0.013896938461145685,
"acc_norm": 0.4197952218430034,
"acc_norm_stderr": 0.01442218122630302
},
"harness|ko_hellaswag|10": {
"acc": 0.37731527584146585,
"acc_stderr": 0.00483724201519111,
"acc_norm": 0.48775144393547104,
"acc_norm_stderr": 0.0049882839816310495
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5263157894736842,
"acc_stderr": 0.03829509868994727,
"acc_norm": 0.5263157894736842,
"acc_norm_stderr": 0.03829509868994727
},
"harness|ko_mmlu_management|5": {
"acc": 0.4854368932038835,
"acc_stderr": 0.04948637324026637,
"acc_norm": 0.4854368932038835,
"acc_norm_stderr": 0.04948637324026637
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.49680715197956576,
"acc_stderr": 0.01787959894593307,
"acc_norm": 0.49680715197956576,
"acc_norm_stderr": 0.01787959894593307
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.32592592592592595,
"acc_stderr": 0.040491220417025055,
"acc_norm": 0.32592592592592595,
"acc_norm_stderr": 0.040491220417025055
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3404255319148936,
"acc_stderr": 0.030976692998534443,
"acc_norm": 0.3404255319148936,
"acc_norm_stderr": 0.030976692998534443
},
"harness|ko_mmlu_virology|5": {
"acc": 0.39156626506024095,
"acc_stderr": 0.037998574544796354,
"acc_norm": 0.39156626506024095,
"acc_norm_stderr": 0.037998574544796354
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4630225080385852,
"acc_stderr": 0.02832032583010591,
"acc_norm": 0.4630225080385852,
"acc_norm_stderr": 0.02832032583010591
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.40358744394618834,
"acc_stderr": 0.03292802819330314,
"acc_norm": 0.40358744394618834,
"acc_norm_stderr": 0.03292802819330314
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.48854961832061067,
"acc_stderr": 0.043841400240780176,
"acc_norm": 0.48854961832061067,
"acc_norm_stderr": 0.043841400240780176
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5656565656565656,
"acc_stderr": 0.03531505879359183,
"acc_norm": 0.5656565656565656,
"acc_norm_stderr": 0.03531505879359183
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5103448275862069,
"acc_stderr": 0.04165774775728762,
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728762
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.27450980392156865,
"acc_stderr": 0.04440521906179327,
"acc_norm": 0.27450980392156865,
"acc_norm_stderr": 0.04440521906179327
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.032252942323996406,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.032252942323996406
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4358974358974359,
"acc_stderr": 0.025141801511177498,
"acc_norm": 0.4358974358974359,
"acc_norm_stderr": 0.025141801511177498
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.04820403072760627,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.04820403072760627
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.37438423645320196,
"acc_stderr": 0.03405155380561952,
"acc_norm": 0.37438423645320196,
"acc_norm_stderr": 0.03405155380561952
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.44516129032258067,
"acc_stderr": 0.02827241018621491,
"acc_norm": 0.44516129032258067,
"acc_norm_stderr": 0.02827241018621491
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6367521367521367,
"acc_stderr": 0.03150712523091264,
"acc_norm": 0.6367521367521367,
"acc_norm_stderr": 0.03150712523091264
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5056603773584906,
"acc_stderr": 0.03077090076385131,
"acc_norm": 0.5056603773584906,
"acc_norm_stderr": 0.03077090076385131
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4727272727272727,
"acc_stderr": 0.04782001791380063,
"acc_norm": 0.4727272727272727,
"acc_norm_stderr": 0.04782001791380063
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.28888888888888886,
"acc_stderr": 0.027634907264178544,
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.027634907264178544
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2980132450331126,
"acc_stderr": 0.037345356767871984,
"acc_norm": 0.2980132450331126,
"acc_norm_stderr": 0.037345356767871984
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5870646766169154,
"acc_stderr": 0.03481520803367348,
"acc_norm": 0.5870646766169154,
"acc_norm_stderr": 0.03481520803367348
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4277456647398844,
"acc_stderr": 0.03772446857518028,
"acc_norm": 0.4277456647398844,
"acc_norm_stderr": 0.03772446857518028
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.024278568024307688,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.024278568024307688
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.375,
"acc_stderr": 0.04048439222695598,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04048439222695598
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.56,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.56,
"acc_norm_stderr": 0.049888765156985884
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.523121387283237,
"acc_stderr": 0.026890297881303118,
"acc_norm": 0.523121387283237,
"acc_norm_stderr": 0.026890297881303118
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.39263803680981596,
"acc_stderr": 0.038367409078310294,
"acc_norm": 0.39263803680981596,
"acc_norm_stderr": 0.038367409078310294
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.404320987654321,
"acc_stderr": 0.027306625297327684,
"acc_norm": 0.404320987654321,
"acc_norm_stderr": 0.027306625297327684
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5181347150259067,
"acc_stderr": 0.036060650018329185,
"acc_norm": 0.5181347150259067,
"acc_norm_stderr": 0.036060650018329185
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.30701754385964913,
"acc_stderr": 0.043391383225798594,
"acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.043391383225798594
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.48990825688073397,
"acc_stderr": 0.021432956203453316,
"acc_norm": 0.48990825688073397,
"acc_norm_stderr": 0.021432956203453316
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.38095238095238093,
"acc_stderr": 0.04343525428949097,
"acc_norm": 0.38095238095238093,
"acc_norm_stderr": 0.04343525428949097
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.46078431372549017,
"acc_stderr": 0.028541722692618874,
"acc_norm": 0.46078431372549017,
"acc_norm_stderr": 0.028541722692618874
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237103,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237103
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.0436923632657398,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.0436923632657398
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4276315789473684,
"acc_stderr": 0.04026097083296558,
"acc_norm": 0.4276315789473684,
"acc_norm_stderr": 0.04026097083296558
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.35784313725490197,
"acc_stderr": 0.019393058402355442,
"acc_norm": 0.35784313725490197,
"acc_norm_stderr": 0.019393058402355442
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.30141843971631205,
"acc_stderr": 0.02737412888263115,
"acc_norm": 0.30141843971631205,
"acc_norm_stderr": 0.02737412888263115
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.41964285714285715,
"acc_stderr": 0.046840993210771065,
"acc_norm": 0.41964285714285715,
"acc_norm_stderr": 0.046840993210771065
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.39351851851851855,
"acc_stderr": 0.03331747876370312,
"acc_norm": 0.39351851851851855,
"acc_norm_stderr": 0.03331747876370312
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.27932960893854747,
"acc_stderr": 0.015005762446786154,
"acc_norm": 0.27932960893854747,
"acc_norm_stderr": 0.015005762446786154
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.41544117647058826,
"acc_stderr": 0.029935342707877746,
"acc_norm": 0.41544117647058826,
"acc_norm_stderr": 0.029935342707877746
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4857142857142857,
"acc_stderr": 0.03199615232806287,
"acc_norm": 0.4857142857142857,
"acc_norm_stderr": 0.03199615232806287
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5527426160337553,
"acc_stderr": 0.03236564251614192,
"acc_norm": 0.5527426160337553,
"acc_norm_stderr": 0.03236564251614192
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2985658409387223,
"acc_stderr": 0.011688060141794224,
"acc_norm": 0.2985658409387223,
"acc_norm_stderr": 0.011688060141794224
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.03410785338904719,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.03410785338904719
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.38181818181818183,
"acc_stderr": 0.037937131711656344,
"acc_norm": 0.38181818181818183,
"acc_norm_stderr": 0.037937131711656344
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.27050183598531213,
"mc1_stderr": 0.015550778332842881,
"mc2": 0.4342691202696536,
"mc2_stderr": 0.015037727340783071
},
"harness|ko_commongen_v2|2": {
"acc": 0.3860684769775679,
"acc_stderr": 0.016738130760321743,
"acc_norm": 0.4510035419126328,
"acc_norm_stderr": 0.017107618859549357
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "maywell/Synatra-11B-Testbench-2",
"model_sha": "50c90dfe257d5c5ad4c3c6a1fb29f6a5066c085a",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}