results/AIFT/AIFT-instruct-SFT-1.3B-v1.6.2/result_2024-02-27 08:46:20.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2901023890784983,
"acc_stderr": 0.013261573677520773,
"acc_norm": 0.34215017064846415,
"acc_norm_stderr": 0.013864152159177278
},
"harness|ko_hellaswag|10": {
"acc": 0.3567018522206732,
"acc_stderr": 0.0047804672709117636,
"acc_norm": 0.4446325433180641,
"acc_norm_stderr": 0.004959094146471525
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.2573099415204678,
"acc_stderr": 0.03352799844161865,
"acc_norm": 0.2573099415204678,
"acc_norm_stderr": 0.03352799844161865
},
"harness|ko_mmlu_management|5": {
"acc": 0.1650485436893204,
"acc_stderr": 0.036756688322331886,
"acc_norm": 0.1650485436893204,
"acc_norm_stderr": 0.036756688322331886
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.015302380123542089,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.015302380123542089
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.035914440841969694,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.035914440841969694
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2851063829787234,
"acc_stderr": 0.029513196625539355,
"acc_norm": 0.2851063829787234,
"acc_norm_stderr": 0.029513196625539355
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3253012048192771,
"acc_stderr": 0.03647168523683228,
"acc_norm": 0.3253012048192771,
"acc_norm_stderr": 0.03647168523683228
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.27009646302250806,
"acc_stderr": 0.025218040373410598,
"acc_norm": 0.27009646302250806,
"acc_norm_stderr": 0.025218040373410598
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3183856502242152,
"acc_stderr": 0.03126580522513713,
"acc_norm": 0.3183856502242152,
"acc_norm_stderr": 0.03126580522513713
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.22137404580152673,
"acc_stderr": 0.0364129708131373,
"acc_norm": 0.22137404580152673,
"acc_norm_stderr": 0.0364129708131373
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.16161616161616163,
"acc_stderr": 0.026225919863629283,
"acc_norm": 0.16161616161616163,
"acc_norm_stderr": 0.026225919863629283
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135302
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.04158307533083286,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.04158307533083286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.23109243697478993,
"acc_stderr": 0.027381406927868963,
"acc_norm": 0.23109243697478993,
"acc_norm_stderr": 0.027381406927868963
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2948717948717949,
"acc_stderr": 0.023119362758232273,
"acc_norm": 0.2948717948717949,
"acc_norm_stderr": 0.023119362758232273
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.26851851851851855,
"acc_stderr": 0.04284467968052191,
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.04284467968052191
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.17733990147783252,
"acc_stderr": 0.026874337276808342,
"acc_norm": 0.17733990147783252,
"acc_norm_stderr": 0.026874337276808342
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.22258064516129034,
"acc_stderr": 0.023664216671642518,
"acc_norm": 0.22258064516129034,
"acc_norm_stderr": 0.023664216671642518
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.2606837606837607,
"acc_stderr": 0.028760348956523414,
"acc_norm": 0.2606837606837607,
"acc_norm_stderr": 0.028760348956523414
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.21132075471698114,
"acc_stderr": 0.025125766484827852,
"acc_norm": 0.21132075471698114,
"acc_norm_stderr": 0.025125766484827852
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2636363636363636,
"acc_stderr": 0.04220224692971987,
"acc_norm": 0.2636363636363636,
"acc_norm_stderr": 0.04220224692971987
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.026962424325073817,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.026962424325073817
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2185430463576159,
"acc_stderr": 0.03374235550425694,
"acc_norm": 0.2185430463576159,
"acc_norm_stderr": 0.03374235550425694
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.23880597014925373,
"acc_stderr": 0.03014777593540922,
"acc_norm": 0.23880597014925373,
"acc_norm_stderr": 0.03014777593540922
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.23699421965317918,
"acc_stderr": 0.03242414757483098,
"acc_norm": 0.23699421965317918,
"acc_norm_stderr": 0.03242414757483098
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.25132275132275134,
"acc_stderr": 0.022340482339643895,
"acc_norm": 0.25132275132275134,
"acc_norm_stderr": 0.022340482339643895
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3055555555555556,
"acc_stderr": 0.03852084696008534,
"acc_norm": 0.3055555555555556,
"acc_norm_stderr": 0.03852084696008534
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2658959537572254,
"acc_stderr": 0.023786203255508283,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.023786203255508283
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.2331288343558282,
"acc_stderr": 0.0332201579577674,
"acc_norm": 0.2331288343558282,
"acc_norm_stderr": 0.0332201579577674
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.25617283950617287,
"acc_stderr": 0.0242885336377261,
"acc_norm": 0.25617283950617287,
"acc_norm_stderr": 0.0242885336377261
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.18652849740932642,
"acc_stderr": 0.02811209121011747,
"acc_norm": 0.18652849740932642,
"acc_norm_stderr": 0.02811209121011747
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.21052631578947367,
"acc_stderr": 0.03835153954399421,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.03835153954399421
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.20733944954128442,
"acc_stderr": 0.017381415563608674,
"acc_norm": 0.20733944954128442,
"acc_norm_stderr": 0.017381415563608674
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.18253968253968253,
"acc_stderr": 0.03455071019102149,
"acc_norm": 0.18253968253968253,
"acc_norm_stderr": 0.03455071019102149
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.023805186524888146,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.023805186524888146
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2396694214876033,
"acc_stderr": 0.038968789850704164,
"acc_norm": 0.2396694214876033,
"acc_norm_stderr": 0.038968789850704164
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.19736842105263158,
"acc_stderr": 0.03238981601699397,
"acc_norm": 0.19736842105263158,
"acc_norm_stderr": 0.03238981601699397
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.017401816711427657,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.017401816711427657
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2730496453900709,
"acc_stderr": 0.026577860943307857,
"acc_norm": 0.2730496453900709,
"acc_norm_stderr": 0.026577860943307857
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25892857142857145,
"acc_stderr": 0.041577515398656284,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.041577515398656284
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.41203703703703703,
"acc_stderr": 0.03356787758160835,
"acc_norm": 0.41203703703703703,
"acc_norm_stderr": 0.03356787758160835
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.264804469273743,
"acc_stderr": 0.014756906483260664,
"acc_norm": 0.264804469273743,
"acc_norm_stderr": 0.014756906483260664
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.27941176470588236,
"acc_stderr": 0.02725720260611494,
"acc_norm": 0.27941176470588236,
"acc_norm_stderr": 0.02725720260611494
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2,
"acc_stderr": 0.02560737598657916,
"acc_norm": 0.2,
"acc_norm_stderr": 0.02560737598657916
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.31223628691983124,
"acc_stderr": 0.030165137867847,
"acc_norm": 0.31223628691983124,
"acc_norm_stderr": 0.030165137867847
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.24445893089960888,
"acc_stderr": 0.0109764250131139,
"acc_norm": 0.24445893089960888,
"acc_norm_stderr": 0.0109764250131139
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.02933116229425172,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.02933116229425172
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.23030303030303031,
"acc_stderr": 0.03287666758603487,
"acc_norm": 0.23030303030303031,
"acc_norm_stderr": 0.03287666758603487
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2386780905752754,
"mc1_stderr": 0.014922629695456416,
"mc2": 0.40837934461063286,
"mc2_stderr": 0.014888690859718215
},
"harness|ko_commongen_v2|2": {
"acc": 0.269185360094451,
"acc_stderr": 0.015249098024144538,
"acc_norm": 0.4037780401416765,
"acc_norm_stderr": 0.016869031540298632
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIFT/AIFT-instruct-SFT-1.3B-v1.6.2",
"model_sha": "93eb653f28e35bb8e84014db3a5082338b257a3a",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
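
For reference, a minimal sketch of reading this results file and summarizing the headline metrics, assuming Python and that the JSON above is saved locally; the file path below is illustrative, not part of the original results.

import json
from statistics import mean

# Illustrative local path; adjust to wherever this results file is stored.
path = "result_2024-02-27 08:46:20.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Macro-average the normalized accuracy over the ko_mmlu subtasks.
mmlu_acc_norm = [v["acc_norm"] for k, v in results.items() if "ko_mmlu" in k]
print(f"ko_mmlu subtasks: {len(mmlu_acc_norm)}")
print(f"mean acc_norm:    {mean(mmlu_acc_norm):.4f}")

# Per-task headline metric: acc_norm where present, mc2 for ko_truthfulqa_mc.
for task, metrics in results.items():
    score = metrics.get("acc_norm", metrics.get("mc2"))
    print(f"{task:<60s} {score:.4f}")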