results/AIFT/AIFT-instruct-SFT-dpo-1.3B-v1.1/result_2024-02-22 23:51:45.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.27474402730375425,
"acc_stderr": 0.013044617212771227,
"acc_norm": 0.3412969283276451,
"acc_norm_stderr": 0.013855831287497723
},
"harness|ko_hellaswag|10": {
"acc": 0.36128261302529374,
"acc_stderr": 0.004793904922401889,
"acc_norm": 0.4475204142601075,
"acc_norm_stderr": 0.0049622205125483595
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.29239766081871343,
"acc_stderr": 0.03488647713457922,
"acc_norm": 0.29239766081871343,
"acc_norm_stderr": 0.03488647713457922
},
"harness|ko_mmlu_management|5": {
"acc": 0.2524271844660194,
"acc_stderr": 0.04301250399690877,
"acc_norm": 0.2524271844660194,
"acc_norm_stderr": 0.04301250399690877
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.24010217113665389,
"acc_stderr": 0.015274685213734193,
"acc_norm": 0.24010217113665389,
"acc_norm_stderr": 0.015274685213734193
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.035914440841969694,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.035914440841969694
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2936170212765957,
"acc_stderr": 0.02977164271249123,
"acc_norm": 0.2936170212765957,
"acc_norm_stderr": 0.02977164271249123
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3313253012048193,
"acc_stderr": 0.03664314777288085,
"acc_norm": 0.3313253012048193,
"acc_norm_stderr": 0.03664314777288085
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.26366559485530544,
"acc_stderr": 0.02502553850053234,
"acc_norm": 0.26366559485530544,
"acc_norm_stderr": 0.02502553850053234
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.2914798206278027,
"acc_stderr": 0.030500283176545913,
"acc_norm": 0.2914798206278027,
"acc_norm_stderr": 0.030500283176545913
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.25190839694656486,
"acc_stderr": 0.03807387116306086,
"acc_norm": 0.25190839694656486,
"acc_norm_stderr": 0.03807387116306086
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768077,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768077
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.17676767676767677,
"acc_stderr": 0.027178752639044915,
"acc_norm": 0.17676767676767677,
"acc_norm_stderr": 0.027178752639044915
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135302
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.04280105837364395,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.04280105837364395
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.2184873949579832,
"acc_stderr": 0.02684151432295893,
"acc_norm": 0.2184873949579832,
"acc_norm_stderr": 0.02684151432295893
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2153846153846154,
"acc_stderr": 0.020843034557462878,
"acc_norm": 0.2153846153846154,
"acc_norm_stderr": 0.020843034557462878
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952344,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952344
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.17,
"acc_stderr": 0.03775251680686371,
"acc_norm": 0.17,
"acc_norm_stderr": 0.03775251680686371
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.04236511258094633,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.04236511258094633
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2315270935960591,
"acc_stderr": 0.02967833314144446,
"acc_norm": 0.2315270935960591,
"acc_norm_stderr": 0.02967833314144446
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2161290322580645,
"acc_stderr": 0.02341529343356853,
"acc_norm": 0.2161290322580645,
"acc_norm_stderr": 0.02341529343356853
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.24358974358974358,
"acc_stderr": 0.0281209665039144,
"acc_norm": 0.24358974358974358,
"acc_norm_stderr": 0.0281209665039144
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.21509433962264152,
"acc_stderr": 0.025288394502891373,
"acc_norm": 0.21509433962264152,
"acc_norm_stderr": 0.025288394502891373
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.34545454545454546,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.34545454545454546,
"acc_norm_stderr": 0.04554619617541054
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2814814814814815,
"acc_stderr": 0.027420019350945277,
"acc_norm": 0.2814814814814815,
"acc_norm_stderr": 0.027420019350945277
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2119205298013245,
"acc_stderr": 0.033367670865679766,
"acc_norm": 0.2119205298013245,
"acc_norm_stderr": 0.033367670865679766
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.25870646766169153,
"acc_stderr": 0.03096590312357304,
"acc_norm": 0.25870646766169153,
"acc_norm_stderr": 0.03096590312357304
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.20809248554913296,
"acc_stderr": 0.03095289021774988,
"acc_norm": 0.20809248554913296,
"acc_norm_stderr": 0.03095289021774988
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2328042328042328,
"acc_stderr": 0.02176596167215454,
"acc_norm": 0.2328042328042328,
"acc_norm_stderr": 0.02176596167215454
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.22916666666666666,
"acc_stderr": 0.03514697467862388,
"acc_norm": 0.22916666666666666,
"acc_norm_stderr": 0.03514697467862388
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.23410404624277456,
"acc_stderr": 0.022797110278071134,
"acc_norm": 0.23410404624277456,
"acc_norm_stderr": 0.022797110278071134
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.22699386503067484,
"acc_stderr": 0.03291099578615771,
"acc_norm": 0.22699386503067484,
"acc_norm_stderr": 0.03291099578615771
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2623456790123457,
"acc_stderr": 0.024477222856135114,
"acc_norm": 0.2623456790123457,
"acc_norm_stderr": 0.024477222856135114
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.20207253886010362,
"acc_stderr": 0.02897908979429673,
"acc_norm": 0.20207253886010362,
"acc_norm_stderr": 0.02897908979429673
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.20175438596491227,
"acc_stderr": 0.03775205013583638,
"acc_norm": 0.20175438596491227,
"acc_norm_stderr": 0.03775205013583638
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.1981651376146789,
"acc_stderr": 0.01709057380421789,
"acc_norm": 0.1981651376146789,
"acc_norm_stderr": 0.01709057380421789
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.18253968253968253,
"acc_stderr": 0.034550710191021496,
"acc_norm": 0.18253968253968253,
"acc_norm_stderr": 0.034550710191021496
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.22875816993464052,
"acc_stderr": 0.024051029739912255,
"acc_norm": 0.22875816993464052,
"acc_norm_stderr": 0.024051029739912255
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2727272727272727,
"acc_stderr": 0.04065578140908705,
"acc_norm": 0.2727272727272727,
"acc_norm_stderr": 0.04065578140908705
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.17763157894736842,
"acc_stderr": 0.03110318238312338,
"acc_norm": 0.17763157894736842,
"acc_norm_stderr": 0.03110318238312338
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.25163398692810457,
"acc_stderr": 0.017555818091322267,
"acc_norm": 0.25163398692810457,
"acc_norm_stderr": 0.017555818091322267
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2375886524822695,
"acc_stderr": 0.025389512552729906,
"acc_norm": 0.2375886524822695,
"acc_norm_stderr": 0.025389512552729906
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.29464285714285715,
"acc_stderr": 0.043270409325787296,
"acc_norm": 0.29464285714285715,
"acc_norm_stderr": 0.043270409325787296
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.24537037037037038,
"acc_stderr": 0.02934666509437294,
"acc_norm": 0.24537037037037038,
"acc_norm_stderr": 0.02934666509437294
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.25027932960893856,
"acc_stderr": 0.014487500852850426,
"acc_norm": 0.25027932960893856,
"acc_norm_stderr": 0.014487500852850426
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.17647058823529413,
"acc_stderr": 0.02315746830855935,
"acc_norm": 0.17647058823529413,
"acc_norm_stderr": 0.02315746830855935
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.19183673469387755,
"acc_stderr": 0.025206963154225378,
"acc_norm": 0.19183673469387755,
"acc_norm_stderr": 0.025206963154225378
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2742616033755274,
"acc_stderr": 0.029041333510598046,
"acc_norm": 0.2742616033755274,
"acc_norm_stderr": 0.029041333510598046
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2392438070404172,
"acc_stderr": 0.010896123652676651,
"acc_norm": 0.2392438070404172,
"acc_norm_stderr": 0.010896123652676651
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.24019607843137256,
"acc_stderr": 0.02998373305591361,
"acc_norm": 0.24019607843137256,
"acc_norm_stderr": 0.02998373305591361
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2606060606060606,
"acc_stderr": 0.03427743175816525,
"acc_norm": 0.2606060606060606,
"acc_norm_stderr": 0.03427743175816525
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24969400244798043,
"mc1_stderr": 0.015152286907148123,
"mc2": 0.4130446713954393,
"mc2_stderr": 0.014977317476214325
},
"harness|ko_commongen_v2|2": {
"acc": 0.30932703659976385,
"acc_stderr": 0.015891320505520886,
"acc_norm": 0.41204250295159384,
"acc_norm_stderr": 0.01692227673852836
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIFT/AIFT-instruct-SFT-dpo-1.3B-v1.1",
"model_sha": "56d98539706359a035a379ae5461cb34620ab5b2",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}