results/Alphacode-AI/AlphaMist7B-slr-v4-slow2/result_2024-05-13 01:43:05.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.48890784982935154,
"acc_stderr": 0.01460779491401306,
"acc_norm": 0.53839590443686,
"acc_norm_stderr": 0.01456824555029636
},
"harness|ko_hellaswag|10": {
"acc": 0.6321449910376419,
"acc_stderr": 0.004812361060493917,
"acc_norm": 0.7341167098187612,
"acc_norm_stderr": 0.004408994868650099
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.49707602339181284,
"acc_stderr": 0.03834759370936839,
"acc_norm": 0.49707602339181284,
"acc_norm_stderr": 0.03834759370936839
},
"harness|ko_mmlu_management|5": {
"acc": 0.6116504854368932,
"acc_stderr": 0.048257293373563895,
"acc_norm": 0.6116504854368932,
"acc_norm_stderr": 0.048257293373563895
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5134099616858238,
"acc_stderr": 0.017873531736510368,
"acc_norm": 0.5134099616858238,
"acc_norm_stderr": 0.017873531736510368
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.041153246103369526
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.37446808510638296,
"acc_stderr": 0.031639106653672915,
"acc_norm": 0.37446808510638296,
"acc_norm_stderr": 0.031639106653672915
},
"harness|ko_mmlu_virology|5": {
"acc": 0.43373493975903615,
"acc_stderr": 0.03858158940685516,
"acc_norm": 0.43373493975903615,
"acc_norm_stderr": 0.03858158940685516
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.47266881028938906,
"acc_stderr": 0.028355633568328188,
"acc_norm": 0.47266881028938906,
"acc_norm_stderr": 0.028355633568328188
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4663677130044843,
"acc_stderr": 0.033481800170603065,
"acc_norm": 0.4663677130044843,
"acc_norm_stderr": 0.033481800170603065
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.46564885496183206,
"acc_stderr": 0.043749285605997376,
"acc_norm": 0.46564885496183206,
"acc_norm_stderr": 0.043749285605997376
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6111111111111112,
"acc_stderr": 0.0347327959083696,
"acc_norm": 0.6111111111111112,
"acc_norm_stderr": 0.0347327959083696
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4,
"acc_stderr": 0.040824829046386284,
"acc_norm": 0.4,
"acc_norm_stderr": 0.040824829046386284
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.29411764705882354,
"acc_stderr": 0.04533838195929777,
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.04533838195929777
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5882352941176471,
"acc_stderr": 0.031968769891957786,
"acc_norm": 0.5882352941176471,
"acc_norm_stderr": 0.031968769891957786
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5282051282051282,
"acc_stderr": 0.02531063925493387,
"acc_norm": 0.5282051282051282,
"acc_norm_stderr": 0.02531063925493387
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.48,
"acc_stderr": 0.05021167315686781,
"acc_norm": 0.48,
"acc_norm_stderr": 0.05021167315686781
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.0483036602463533,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.0483036602463533
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4433497536945813,
"acc_stderr": 0.03495334582162933,
"acc_norm": 0.4433497536945813,
"acc_norm_stderr": 0.03495334582162933
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4870967741935484,
"acc_stderr": 0.028434533152681855,
"acc_norm": 0.4870967741935484,
"acc_norm_stderr": 0.028434533152681855
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.030882736974138656,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.030882736974138656
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.42641509433962266,
"acc_stderr": 0.030437794342983052,
"acc_norm": 0.42641509433962266,
"acc_norm_stderr": 0.030437794342983052
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5363636363636364,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.5363636363636364,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2740740740740741,
"acc_stderr": 0.027195934804085626,
"acc_norm": 0.2740740740740741,
"acc_norm_stderr": 0.027195934804085626
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2913907284768212,
"acc_stderr": 0.037101857261199946,
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.037101857261199946
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5771144278606966,
"acc_stderr": 0.034932317774212816,
"acc_norm": 0.5771144278606966,
"acc_norm_stderr": 0.034932317774212816
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3815028901734104,
"acc_stderr": 0.03703851193099521,
"acc_norm": 0.3815028901734104,
"acc_norm_stderr": 0.03703851193099521
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3544973544973545,
"acc_stderr": 0.024636830602842,
"acc_norm": 0.3544973544973545,
"acc_norm_stderr": 0.024636830602842
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.040166600304512336,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.040166600304512336
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.56,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.56,
"acc_norm_stderr": 0.049888765156985884
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5028901734104047,
"acc_stderr": 0.02691864538323901,
"acc_norm": 0.5028901734104047,
"acc_norm_stderr": 0.02691864538323901
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.49079754601226994,
"acc_stderr": 0.03927705600787443,
"acc_norm": 0.49079754601226994,
"acc_norm_stderr": 0.03927705600787443
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4845679012345679,
"acc_stderr": 0.027807490044276198,
"acc_norm": 0.4845679012345679,
"acc_norm_stderr": 0.027807490044276198
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5440414507772021,
"acc_stderr": 0.03594413711272436,
"acc_norm": 0.5440414507772021,
"acc_norm_stderr": 0.03594413711272436
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.32456140350877194,
"acc_stderr": 0.044045561573747685,
"acc_norm": 0.32456140350877194,
"acc_norm_stderr": 0.044045561573747685
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5394495412844037,
"acc_stderr": 0.021370494609995093,
"acc_norm": 0.5394495412844037,
"acc_norm_stderr": 0.021370494609995093
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.42063492063492064,
"acc_stderr": 0.04415438226743743,
"acc_norm": 0.42063492063492064,
"acc_norm_stderr": 0.04415438226743743
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.49673202614379086,
"acc_stderr": 0.02862930519400354,
"acc_norm": 0.49673202614379086,
"acc_norm_stderr": 0.02862930519400354
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.52,
"acc_stderr": 0.05021167315686779,
"acc_norm": 0.52,
"acc_norm_stderr": 0.05021167315686779
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6776859504132231,
"acc_stderr": 0.04266416363352168,
"acc_norm": 0.6776859504132231,
"acc_norm_stderr": 0.04266416363352168
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4473684210526316,
"acc_stderr": 0.04046336883978251,
"acc_norm": 0.4473684210526316,
"acc_norm_stderr": 0.04046336883978251
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.4068627450980392,
"acc_stderr": 0.01987380200506118,
"acc_norm": 0.4068627450980392,
"acc_norm_stderr": 0.01987380200506118
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.31560283687943264,
"acc_stderr": 0.027724989449509314,
"acc_norm": 0.31560283687943264,
"acc_norm_stderr": 0.027724989449509314
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285713,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285713
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4305555555555556,
"acc_stderr": 0.03376922151252335,
"acc_norm": 0.4305555555555556,
"acc_norm_stderr": 0.03376922151252335
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2569832402234637,
"acc_stderr": 0.014614465821966346,
"acc_norm": 0.2569832402234637,
"acc_norm_stderr": 0.014614465821966346
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.45588235294117646,
"acc_stderr": 0.03025437257397669,
"acc_norm": 0.45588235294117646,
"acc_norm_stderr": 0.03025437257397669
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5591836734693878,
"acc_stderr": 0.03178419114175363,
"acc_norm": 0.5591836734693878,
"acc_norm_stderr": 0.03178419114175363
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5949367088607594,
"acc_stderr": 0.031955147413706725,
"acc_norm": 0.5949367088607594,
"acc_norm_stderr": 0.031955147413706725
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3520208604954368,
"acc_stderr": 0.012198140605353609,
"acc_norm": 0.3520208604954368,
"acc_norm_stderr": 0.012198140605353609
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.47058823529411764,
"acc_stderr": 0.03503235296367993,
"acc_norm": 0.47058823529411764,
"acc_norm_stderr": 0.03503235296367993
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.4727272727272727,
"acc_stderr": 0.03898531605579419,
"acc_norm": 0.4727272727272727,
"acc_norm_stderr": 0.03898531605579419
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.4785801713586291,
"mc1_stderr": 0.017487432144711806,
"mc2": 0.621189713542375,
"mc2_stderr": 0.016320011615209882
},
"harness|ko_commongen_v2|2": {
"acc": 0.4805194805194805,
"acc_stderr": 0.017177301992342544,
"acc_norm": 0.5182998819362455,
"acc_norm_stderr": 0.017178836639177752
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Alphacode-AI/AlphaMist7B-slr-v4-slow2",
"model_sha": "672cbca0feabcfba309c030ea54304b7fd44a292",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}