{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.46245733788395904,
            "acc_stderr": 0.014570144495075576,
            "acc_norm": 0.5093856655290102,
            "acc_norm_stderr": 0.014608816322065003
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.5617406891057558,
            "acc_stderr": 0.004951594063272057,
            "acc_norm": 0.6797450707030472,
            "acc_norm_stderr": 0.004656208951541448
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5087719298245614,
            "acc_stderr": 0.038342347441649924,
            "acc_norm": 0.5087719298245614,
            "acc_norm_stderr": 0.038342347441649924
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6116504854368932,
            "acc_stderr": 0.048257293373563895,
            "acc_norm": 0.6116504854368932,
            "acc_norm_stderr": 0.048257293373563895
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5185185185185185,
            "acc_stderr": 0.017867695938429774,
            "acc_norm": 0.5185185185185185,
            "acc_norm_stderr": 0.017867695938429774
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3851851851851852,
            "acc_stderr": 0.042039210401562783,
            "acc_norm": 0.3851851851851852,
            "acc_norm_stderr": 0.042039210401562783
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.35319148936170214,
            "acc_stderr": 0.03124532520276193,
            "acc_norm": 0.35319148936170214,
            "acc_norm_stderr": 0.03124532520276193
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.463855421686747,
            "acc_stderr": 0.03882310850890594,
            "acc_norm": 0.463855421686747,
            "acc_norm_stderr": 0.03882310850890594
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5048231511254019,
            "acc_stderr": 0.028396770444111298,
            "acc_norm": 0.5048231511254019,
            "acc_norm_stderr": 0.028396770444111298
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4349775784753363,
            "acc_stderr": 0.03327283370271344,
            "acc_norm": 0.4349775784753363,
            "acc_norm_stderr": 0.03327283370271344
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48091603053435117,
            "acc_stderr": 0.04382094705550989,
            "acc_norm": 0.48091603053435117,
            "acc_norm_stderr": 0.04382094705550989
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.55,
            "acc_stderr": 0.05,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6212121212121212,
            "acc_stderr": 0.03456088731993747,
            "acc_norm": 0.6212121212121212,
            "acc_norm_stderr": 0.03456088731993747
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.45517241379310347,
            "acc_stderr": 0.04149886942192117,
            "acc_norm": 0.45517241379310347,
            "acc_norm_stderr": 0.04149886942192117
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.04389869956808777,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.04389869956808777
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5798319327731093,
            "acc_stderr": 0.03206183783236152,
            "acc_norm": 0.5798319327731093,
            "acc_norm_stderr": 0.03206183783236152
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5307692307692308,
            "acc_stderr": 0.025302958890850158,
            "acc_norm": 0.5307692307692308,
            "acc_norm_stderr": 0.025302958890850158
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5185185185185185,
            "acc_stderr": 0.0483036602463533,
            "acc_norm": 0.5185185185185185,
            "acc_norm_stderr": 0.0483036602463533
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.03481904844438803,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.03481904844438803
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.47096774193548385,
            "acc_stderr": 0.028396016402761008,
            "acc_norm": 0.47096774193548385,
            "acc_norm_stderr": 0.028396016402761008
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6752136752136753,
            "acc_stderr": 0.03067902276549883,
            "acc_norm": 0.6752136752136753,
            "acc_norm_stderr": 0.03067902276549883
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4377358490566038,
            "acc_stderr": 0.030533338430467523,
            "acc_norm": 0.4377358490566038,
            "acc_norm_stderr": 0.030533338430467523
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5272727272727272,
            "acc_stderr": 0.04782001791380061,
            "acc_norm": 0.5272727272727272,
            "acc_norm_stderr": 0.04782001791380061
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.02763490726417854,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.02763490726417854
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5671641791044776,
            "acc_stderr": 0.03503490923673281,
            "acc_norm": 0.5671641791044776,
            "acc_norm_stderr": 0.03503490923673281
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3988439306358382,
            "acc_stderr": 0.03733626655383509,
            "acc_norm": 0.3988439306358382,
            "acc_norm_stderr": 0.03733626655383509
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.36243386243386244,
            "acc_stderr": 0.024757473902752052,
            "acc_norm": 0.36243386243386244,
            "acc_norm_stderr": 0.024757473902752052
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.040166600304512336,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.040166600304512336
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.62,
            "acc_stderr": 0.04878317312145634,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.04878317312145634
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.026918645383239004,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.026918645383239004
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.49693251533742333,
            "acc_stderr": 0.03928297078179663,
            "acc_norm": 0.49693251533742333,
            "acc_norm_stderr": 0.03928297078179663
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4691358024691358,
            "acc_stderr": 0.027767689606833935,
            "acc_norm": 0.4691358024691358,
            "acc_norm_stderr": 0.027767689606833935
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421296,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421296
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5803108808290155,
            "acc_stderr": 0.03561587327685884,
            "acc_norm": 0.5803108808290155,
            "acc_norm_stderr": 0.03561587327685884
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2982456140350877,
            "acc_stderr": 0.04303684033537317,
            "acc_norm": 0.2982456140350877,
            "acc_norm_stderr": 0.04303684033537317
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5357798165137615,
            "acc_stderr": 0.021382364775701896,
            "acc_norm": 0.5357798165137615,
            "acc_norm_stderr": 0.021382364775701896
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4603174603174603,
            "acc_stderr": 0.04458029125470973,
            "acc_norm": 0.4603174603174603,
            "acc_norm_stderr": 0.04458029125470973
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5098039215686274,
            "acc_stderr": 0.02862441255016795,
            "acc_norm": 0.5098039215686274,
            "acc_norm_stderr": 0.02862441255016795
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.04294340845212094,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.04294340845212094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4342105263157895,
            "acc_stderr": 0.04033565667848319,
            "acc_norm": 0.4342105263157895,
            "acc_norm_stderr": 0.04033565667848319
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4019607843137255,
            "acc_stderr": 0.01983517648437538,
            "acc_norm": 0.4019607843137255,
            "acc_norm_stderr": 0.01983517648437538
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.30851063829787234,
            "acc_stderr": 0.02755336616510137,
            "acc_norm": 0.30851063829787234,
            "acc_norm_stderr": 0.02755336616510137
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.33035714285714285,
            "acc_stderr": 0.04464285714285714,
            "acc_norm": 0.33035714285714285,
            "acc_norm_stderr": 0.04464285714285714
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.42592592592592593,
            "acc_stderr": 0.033723432716530624,
            "acc_norm": 0.42592592592592593,
            "acc_norm_stderr": 0.033723432716530624
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23910614525139665,
            "acc_stderr": 0.014265554192331165,
            "acc_norm": 0.23910614525139665,
            "acc_norm_stderr": 0.014265554192331165
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.59,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.45955882352941174,
            "acc_stderr": 0.03027332507734576,
            "acc_norm": 0.45955882352941174,
            "acc_norm_stderr": 0.03027332507734576
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5469387755102041,
            "acc_stderr": 0.031867859300041275,
            "acc_norm": 0.5469387755102041,
            "acc_norm_stderr": 0.031867859300041275
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5654008438818565,
            "acc_stderr": 0.03226759995510144,
            "acc_norm": 0.5654008438818565,
            "acc_norm_stderr": 0.03226759995510144
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3435462842242503,
            "acc_stderr": 0.012128961174190156,
            "acc_norm": 0.3435462842242503,
            "acc_norm_stderr": 0.012128961174190156
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.44607843137254904,
            "acc_stderr": 0.03488845451304974,
            "acc_norm": 0.44607843137254904,
            "acc_norm_stderr": 0.03488845451304974
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4303030303030303,
            "acc_stderr": 0.03866225962879077,
            "acc_norm": 0.4303030303030303,
            "acc_norm_stderr": 0.03866225962879077
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.40269277845777235,
            "mc1_stderr": 0.017168830935187222,
            "mc2": 0.5704428288497218,
            "mc2_stderr": 0.01623760406081459
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4852420306965762,
            "acc_stderr": 0.01718286443499856,
            "acc_norm": 0.5171192443919717,
            "acc_norm_stderr": 0.01718027524608563
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Alphacode-AI/AlphaMist7B-slr-v4_1",
        "model_sha": "ea74366badcf5b383d55ed046644e877311926c0",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}