{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.4761092150170648,
"acc_stderr": 0.014594701798071657,
"acc_norm": 0.515358361774744,
"acc_norm_stderr": 0.014604496129394916
},
"harness|ko_hellaswag|10": {
"acc": 0.5443138816968731,
"acc_stderr": 0.004970145708188002,
"acc_norm": 0.6666998605855408,
"acc_norm_stderr": 0.004704293898729907
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4853801169590643,
"acc_stderr": 0.038331852752130205,
"acc_norm": 0.4853801169590643,
"acc_norm_stderr": 0.038331852752130205
},
"harness|ko_mmlu_management|5": {
"acc": 0.5825242718446602,
"acc_stderr": 0.048828405482122375,
"acc_norm": 0.5825242718446602,
"acc_norm_stderr": 0.048828405482122375
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5159642401021711,
"acc_stderr": 0.01787084750608174,
"acc_norm": 0.5159642401021711,
"acc_norm_stderr": 0.01787084750608174
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.041153246103369526
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3829787234042553,
"acc_stderr": 0.03177821250236922,
"acc_norm": 0.3829787234042553,
"acc_norm_stderr": 0.03177821250236922
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4397590361445783,
"acc_stderr": 0.03864139923699121,
"acc_norm": 0.4397590361445783,
"acc_norm_stderr": 0.03864139923699121
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5048231511254019,
"acc_stderr": 0.028396770444111305,
"acc_norm": 0.5048231511254019,
"acc_norm_stderr": 0.028396770444111305
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.45739910313901344,
"acc_stderr": 0.03343577705583064,
"acc_norm": 0.45739910313901344,
"acc_norm_stderr": 0.03343577705583064
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.46564885496183206,
"acc_stderr": 0.043749285605997376,
"acc_norm": 0.46564885496183206,
"acc_norm_stderr": 0.043749285605997376
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6212121212121212,
"acc_stderr": 0.03456088731993747,
"acc_norm": 0.6212121212121212,
"acc_norm_stderr": 0.03456088731993747
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.46206896551724136,
"acc_stderr": 0.041546596717075474,
"acc_norm": 0.46206896551724136,
"acc_norm_stderr": 0.041546596717075474
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3627450980392157,
"acc_stderr": 0.047840607041056527,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.047840607041056527
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.592436974789916,
"acc_stderr": 0.03191863374478466,
"acc_norm": 0.592436974789916,
"acc_norm_stderr": 0.03191863374478466
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5307692307692308,
"acc_stderr": 0.025302958890850154,
"acc_norm": 0.5307692307692308,
"acc_norm_stderr": 0.025302958890850154
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5462962962962963,
"acc_stderr": 0.04812917324536823,
"acc_norm": 0.5462962962962963,
"acc_norm_stderr": 0.04812917324536823
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.03481904844438803,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.03481904844438803
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4774193548387097,
"acc_stderr": 0.02841498501970786,
"acc_norm": 0.4774193548387097,
"acc_norm_stderr": 0.02841498501970786
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6709401709401709,
"acc_stderr": 0.030782321577688173,
"acc_norm": 0.6709401709401709,
"acc_norm_stderr": 0.030782321577688173
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.41132075471698115,
"acc_stderr": 0.030285009259009787,
"acc_norm": 0.41132075471698115,
"acc_norm_stderr": 0.030285009259009787
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5454545454545454,
"acc_stderr": 0.04769300568972744,
"acc_norm": 0.5454545454545454,
"acc_norm_stderr": 0.04769300568972744
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2740740740740741,
"acc_stderr": 0.027195934804085626,
"acc_norm": 0.2740740740740741,
"acc_norm_stderr": 0.027195934804085626
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.037579499229433426,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.037579499229433426
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5870646766169154,
"acc_stderr": 0.03481520803367348,
"acc_norm": 0.5870646766169154,
"acc_norm_stderr": 0.03481520803367348
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3583815028901734,
"acc_stderr": 0.036563436533531585,
"acc_norm": 0.3583815028901734,
"acc_norm_stderr": 0.036563436533531585
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3492063492063492,
"acc_stderr": 0.024552292209342665,
"acc_norm": 0.3492063492063492,
"acc_norm_stderr": 0.024552292209342665
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3958333333333333,
"acc_stderr": 0.04089465449325582,
"acc_norm": 0.3958333333333333,
"acc_norm_stderr": 0.04089465449325582
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.57,
"acc_stderr": 0.04975698519562427,
"acc_norm": 0.57,
"acc_norm_stderr": 0.04975698519562427
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5086705202312138,
"acc_stderr": 0.026915047355369804,
"acc_norm": 0.5086705202312138,
"acc_norm_stderr": 0.026915047355369804
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5030674846625767,
"acc_stderr": 0.03928297078179663,
"acc_norm": 0.5030674846625767,
"acc_norm_stderr": 0.03928297078179663
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.49074074074074076,
"acc_stderr": 0.027815973433878014,
"acc_norm": 0.49074074074074076,
"acc_norm_stderr": 0.027815973433878014
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5595854922279793,
"acc_stderr": 0.035827245300360945,
"acc_norm": 0.5595854922279793,
"acc_norm_stderr": 0.035827245300360945
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.32456140350877194,
"acc_stderr": 0.044045561573747685,
"acc_norm": 0.32456140350877194,
"acc_norm_stderr": 0.044045561573747685
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5357798165137615,
"acc_stderr": 0.021382364775701893,
"acc_norm": 0.5357798165137615,
"acc_norm_stderr": 0.021382364775701893
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.40476190476190477,
"acc_stderr": 0.043902592653775614,
"acc_norm": 0.40476190476190477,
"acc_norm_stderr": 0.043902592653775614
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.49019607843137253,
"acc_stderr": 0.028624412550167965,
"acc_norm": 0.49019607843137253,
"acc_norm_stderr": 0.028624412550167965
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6694214876033058,
"acc_stderr": 0.04294340845212094,
"acc_norm": 0.6694214876033058,
"acc_norm_stderr": 0.04294340845212094
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.40789473684210525,
"acc_stderr": 0.03999309712777471,
"acc_norm": 0.40789473684210525,
"acc_norm_stderr": 0.03999309712777471
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.42320261437908496,
"acc_stderr": 0.019987809769482057,
"acc_norm": 0.42320261437908496,
"acc_norm_stderr": 0.019987809769482057
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.29432624113475175,
"acc_stderr": 0.027187127011503796,
"acc_norm": 0.29432624113475175,
"acc_norm_stderr": 0.027187127011503796
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.3482142857142857,
"acc_stderr": 0.04521829902833586,
"acc_norm": 0.3482142857142857,
"acc_norm_stderr": 0.04521829902833586
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4212962962962963,
"acc_stderr": 0.033674621388960775,
"acc_norm": 0.4212962962962963,
"acc_norm_stderr": 0.033674621388960775
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2569832402234637,
"acc_stderr": 0.014614465821966348,
"acc_norm": 0.2569832402234637,
"acc_norm_stderr": 0.014614465821966348
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.43,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.43014705882352944,
"acc_stderr": 0.030074971917302875,
"acc_norm": 0.43014705882352944,
"acc_norm_stderr": 0.030074971917302875
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5469387755102041,
"acc_stderr": 0.031867859300041275,
"acc_norm": 0.5469387755102041,
"acc_norm_stderr": 0.031867859300041275
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5780590717299579,
"acc_stderr": 0.032148146302403695,
"acc_norm": 0.5780590717299579,
"acc_norm_stderr": 0.032148146302403695
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.35919165580182527,
"acc_stderr": 0.012253386187584252,
"acc_norm": 0.35919165580182527,
"acc_norm_stderr": 0.012253386187584252
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.46078431372549017,
"acc_stderr": 0.03498501649369527,
"acc_norm": 0.46078431372549017,
"acc_norm_stderr": 0.03498501649369527
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.4727272727272727,
"acc_stderr": 0.03898531605579419,
"acc_norm": 0.4727272727272727,
"acc_norm_stderr": 0.03898531605579419
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.42472460220318237,
"mc1_stderr": 0.017304000957167488,
"mc2": 0.5765535731166324,
"mc2_stderr": 0.01654086044742999
},
"harness|ko_commongen_v2|2": {
"acc": 0.500590318772137,
"acc_stderr": 0.017190342123448586,
"acc_norm": 0.5336481700118064,
"acc_norm_stderr": 0.017151384117131865
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Alphacode-AI-Team/Alpha-LLM-Mistral7B-v2",
"model_sha": "f785c18fbe6076e9e9492c4aad0e5a4a46bdc522",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
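
The JSON above is a per-task results file for Alphacode-AI-Team/Alpha-LLM-Mistral7B-v2: each "harness|ko_*|N" entry reports accuracy ("acc") and normalized accuracy ("acc_norm") with their standard errors, except ko_truthfulqa_mc, which reports "mc1"/"mc2". As a minimal sketch (not part of the original file), the Python below shows one way such a file could be loaded and summarized; RESULTS_PATH is an assumed local filename, and the aggregation shown (a plain mean over acc_norm) is illustrative rather than the leaderboard's official scoring.

import json
from statistics import mean

# Assumed local copy of this results file.
RESULTS_PATH = "result_2024-04-04 06:59:03.json"

with open(RESULTS_PATH, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Collect acc_norm wherever it is reported (ARC, HellaSwag, the MMLU
# subtasks, CommonGen v2); TruthfulQA uses mc1/mc2 instead and is skipped.
acc_norm_scores = {
    task: scores["acc_norm"]
    for task, scores in results.items()
    if "acc_norm" in scores
}

print(f"tasks with acc_norm: {len(acc_norm_scores)}")
print(f"mean acc_norm:       {mean(acc_norm_scores.values()):.4f}")
print(f"TruthfulQA mc2:      {results['harness|ko_truthfulqa_mc|0']['mc2']:.4f}")
print(f"model:               {data['config_general']['model_name']}")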