{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.27474402730375425,
"acc_stderr": 0.013044617212771227,
"acc_norm": 0.3361774744027304,
"acc_norm_stderr": 0.01380485502620576
},
"harness|ko_hellaswag|10": {
"acc": 0.3577972515435172,
"acc_stderr": 0.0047837237982865,
"acc_norm": 0.4455287791276638,
"acc_norm_stderr": 0.004960082528852433
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.28654970760233917,
"acc_stderr": 0.034678266857038245,
"acc_norm": 0.28654970760233917,
"acc_norm_stderr": 0.034678266857038245
},
"harness|ko_mmlu_management|5": {
"acc": 0.2621359223300971,
"acc_stderr": 0.04354631077260597,
"acc_norm": 0.2621359223300971,
"acc_norm_stderr": 0.04354631077260597
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.21966794380587484,
"acc_stderr": 0.014805384478371169,
"acc_norm": 0.21966794380587484,
"acc_norm_stderr": 0.014805384478371169
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.1925925925925926,
"acc_stderr": 0.03406542058502652,
"acc_norm": 0.1925925925925926,
"acc_norm_stderr": 0.03406542058502652
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.23829787234042554,
"acc_stderr": 0.027851252973889774,
"acc_norm": 0.23829787234042554,
"acc_norm_stderr": 0.027851252973889774
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3072289156626506,
"acc_stderr": 0.035915667978246635,
"acc_norm": 0.3072289156626506,
"acc_norm_stderr": 0.035915667978246635
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.2765273311897106,
"acc_stderr": 0.025403832978179622,
"acc_norm": 0.2765273311897106,
"acc_norm_stderr": 0.025403832978179622
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.21973094170403587,
"acc_stderr": 0.02779017706438359,
"acc_norm": 0.21973094170403587,
"acc_norm_stderr": 0.02779017706438359
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.24427480916030533,
"acc_stderr": 0.037683359597287434,
"acc_norm": 0.24427480916030533,
"acc_norm_stderr": 0.037683359597287434
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.18686868686868688,
"acc_stderr": 0.027772533334218967,
"acc_norm": 0.18686868686868688,
"acc_norm_stderr": 0.027772533334218967
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.22758620689655173,
"acc_stderr": 0.03493950380131184,
"acc_norm": 0.22758620689655173,
"acc_norm_stderr": 0.03493950380131184
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.27450980392156865,
"acc_stderr": 0.04440521906179326,
"acc_norm": 0.27450980392156865,
"acc_norm_stderr": 0.04440521906179326
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.2184873949579832,
"acc_stderr": 0.02684151432295893,
"acc_norm": 0.2184873949579832,
"acc_norm_stderr": 0.02684151432295893
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.19230769230769232,
"acc_stderr": 0.019982347208637292,
"acc_norm": 0.19230769230769232,
"acc_norm_stderr": 0.019982347208637292
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.26851851851851855,
"acc_stderr": 0.04284467968052192,
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.04284467968052192
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.32019704433497537,
"acc_stderr": 0.03282649385304151,
"acc_norm": 0.32019704433497537,
"acc_norm_stderr": 0.03282649385304151
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3,
"acc_stderr": 0.02606936229533513,
"acc_norm": 0.3,
"acc_norm_stderr": 0.02606936229533513
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.029343114798094462,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.029343114798094462
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.22264150943396227,
"acc_stderr": 0.025604233470899095,
"acc_norm": 0.22264150943396227,
"acc_norm_stderr": 0.025604233470899095
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.32727272727272727,
"acc_stderr": 0.04494290866252089,
"acc_norm": 0.32727272727272727,
"acc_norm_stderr": 0.04494290866252089
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.026067159222275798,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.026067159222275798
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2781456953642384,
"acc_stderr": 0.036586032627637426,
"acc_norm": 0.2781456953642384,
"acc_norm_stderr": 0.036586032627637426
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.24875621890547264,
"acc_stderr": 0.030567675938916707,
"acc_norm": 0.24875621890547264,
"acc_norm_stderr": 0.030567675938916707
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.26011560693641617,
"acc_stderr": 0.03345036916788989,
"acc_norm": 0.26011560693641617,
"acc_norm_stderr": 0.03345036916788989
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24867724867724866,
"acc_stderr": 0.02226181769240018,
"acc_norm": 0.24867724867724866,
"acc_norm_stderr": 0.02226181769240018
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.24305555555555555,
"acc_stderr": 0.03586879280080341,
"acc_norm": 0.24305555555555555,
"acc_norm_stderr": 0.03586879280080341
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542126,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542126
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2774566473988439,
"acc_stderr": 0.024105712607754307,
"acc_norm": 0.2774566473988439,
"acc_norm_stderr": 0.024105712607754307
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.22085889570552147,
"acc_stderr": 0.032591773927421776,
"acc_norm": 0.22085889570552147,
"acc_norm_stderr": 0.032591773927421776
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2654320987654321,
"acc_stderr": 0.02456922360046085,
"acc_norm": 0.2654320987654321,
"acc_norm_stderr": 0.02456922360046085
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.21243523316062177,
"acc_stderr": 0.02951928261681725,
"acc_norm": 0.21243523316062177,
"acc_norm_stderr": 0.02951928261681725
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.041857744240220575,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.041857744240220575
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.2,
"acc_stderr": 0.017149858514250944,
"acc_norm": 0.2,
"acc_norm_stderr": 0.017149858514250944
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3253968253968254,
"acc_stderr": 0.041905964388711366,
"acc_norm": 0.3253968253968254,
"acc_norm_stderr": 0.041905964388711366
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2973856209150327,
"acc_stderr": 0.026173908506718576,
"acc_norm": 0.2973856209150327,
"acc_norm_stderr": 0.026173908506718576
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.3305785123966942,
"acc_stderr": 0.04294340845212095,
"acc_norm": 0.3305785123966942,
"acc_norm_stderr": 0.04294340845212095
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.2565789473684211,
"acc_stderr": 0.0355418036802569,
"acc_norm": 0.2565789473684211,
"acc_norm_stderr": 0.0355418036802569
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.25163398692810457,
"acc_stderr": 0.01755581809132227,
"acc_norm": 0.25163398692810457,
"acc_norm_stderr": 0.01755581809132227
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.24468085106382978,
"acc_stderr": 0.025645553622266726,
"acc_norm": 0.24468085106382978,
"acc_norm_stderr": 0.025645553622266726
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.03894641120044792,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.03894641120044792
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4351851851851852,
"acc_stderr": 0.033812000056435254,
"acc_norm": 0.4351851851851852,
"acc_norm_stderr": 0.033812000056435254
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.27262569832402234,
"acc_stderr": 0.014893391735249608,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249608
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3492647058823529,
"acc_stderr": 0.028959755196824862,
"acc_norm": 0.3492647058823529,
"acc_norm_stderr": 0.028959755196824862
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.19591836734693877,
"acc_stderr": 0.02540930195322568,
"acc_norm": 0.19591836734693877,
"acc_norm_stderr": 0.02540930195322568
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.29535864978902954,
"acc_stderr": 0.029696338713422882,
"acc_norm": 0.29535864978902954,
"acc_norm_stderr": 0.029696338713422882
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2457627118644068,
"acc_stderr": 0.010996156635142692,
"acc_norm": 0.2457627118644068,
"acc_norm_stderr": 0.010996156635142692
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2696078431372549,
"acc_stderr": 0.03114557065948678,
"acc_norm": 0.2696078431372549,
"acc_norm_stderr": 0.03114557065948678
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.24848484848484848,
"acc_stderr": 0.03374402644139404,
"acc_norm": 0.24848484848484848,
"acc_norm_stderr": 0.03374402644139404
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.25703794369645044,
"mc1_stderr": 0.015298077509485083,
"mc2": 0.4174368957869544,
"mc2_stderr": 0.015294388765459724
},
"harness|ko_commongen_v2|2": {
"acc": 0.3317591499409681,
"acc_stderr": 0.01618798464215732,
"acc_norm": 0.4462809917355372,
"acc_norm_stderr": 0.017090852631668332
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B-dpo",
"model_sha": "e9e27e2063046b74476dadb9af3eb45e8786310c",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}