{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.46075085324232085,
"acc_stderr": 0.014566303676636581,
"acc_norm": 0.5068259385665529,
"acc_norm_stderr": 0.014610029151379813
},
"harness|ko_hellaswag|10": {
"acc": 0.5566620195180243,
"acc_stderr": 0.004957637648426469,
"acc_norm": 0.6767576180043816,
"acc_norm_stderr": 0.004667585072717508
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5087719298245614,
"acc_stderr": 0.038342347441649924,
"acc_norm": 0.5087719298245614,
"acc_norm_stderr": 0.038342347441649924
},
"harness|ko_mmlu_management|5": {
"acc": 0.5825242718446602,
"acc_stderr": 0.048828405482122375,
"acc_norm": 0.5825242718446602,
"acc_norm_stderr": 0.048828405482122375
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.017867695938429774,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.017867695938429774
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.04171654161354543,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04171654161354543
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3617021276595745,
"acc_stderr": 0.031410821975962386,
"acc_norm": 0.3617021276595745,
"acc_norm_stderr": 0.031410821975962386
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4397590361445783,
"acc_stderr": 0.03864139923699121,
"acc_norm": 0.4397590361445783,
"acc_norm_stderr": 0.03864139923699121
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4983922829581994,
"acc_stderr": 0.02839794490780661,
"acc_norm": 0.4983922829581994,
"acc_norm_stderr": 0.02839794490780661
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4349775784753363,
"acc_stderr": 0.03327283370271344,
"acc_norm": 0.4349775784753363,
"acc_norm_stderr": 0.03327283370271344
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.48854961832061067,
"acc_stderr": 0.043841400240780176,
"acc_norm": 0.48854961832061067,
"acc_norm_stderr": 0.043841400240780176
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.56,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.56,
"acc_norm_stderr": 0.049888765156985884
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6212121212121212,
"acc_stderr": 0.03456088731993747,
"acc_norm": 0.6212121212121212,
"acc_norm_stderr": 0.03456088731993747
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4482758620689655,
"acc_stderr": 0.04144311810878151,
"acc_norm": 0.4482758620689655,
"acc_norm_stderr": 0.04144311810878151
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3235294117647059,
"acc_stderr": 0.04655010411319616,
"acc_norm": 0.3235294117647059,
"acc_norm_stderr": 0.04655010411319616
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5966386554621849,
"acc_stderr": 0.031866081214088314,
"acc_norm": 0.5966386554621849,
"acc_norm_stderr": 0.031866081214088314
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5205128205128206,
"acc_stderr": 0.02532966316348994,
"acc_norm": 0.5205128205128206,
"acc_norm_stderr": 0.02532966316348994
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956911
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.0483036602463533,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.0483036602463533
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4236453201970443,
"acc_stderr": 0.03476725747649038,
"acc_norm": 0.4236453201970443,
"acc_norm_stderr": 0.03476725747649038
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.46774193548387094,
"acc_stderr": 0.028384747788813332,
"acc_norm": 0.46774193548387094,
"acc_norm_stderr": 0.028384747788813332
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6752136752136753,
"acc_stderr": 0.03067902276549883,
"acc_norm": 0.6752136752136753,
"acc_norm_stderr": 0.03067902276549883
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.44528301886792454,
"acc_stderr": 0.030588052974270648,
"acc_norm": 0.44528301886792454,
"acc_norm_stderr": 0.030588052974270648
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5363636363636364,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.5363636363636364,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.027840811495871927,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.027840811495871927
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2913907284768212,
"acc_stderr": 0.03710185726119995,
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.03710185726119995
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5621890547263682,
"acc_stderr": 0.0350808011219984,
"acc_norm": 0.5621890547263682,
"acc_norm_stderr": 0.0350808011219984
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3988439306358382,
"acc_stderr": 0.03733626655383509,
"acc_norm": 0.3988439306358382,
"acc_norm_stderr": 0.03733626655383509
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.36243386243386244,
"acc_stderr": 0.024757473902752052,
"acc_norm": 0.36243386243386244,
"acc_norm_stderr": 0.024757473902752052
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3680555555555556,
"acc_stderr": 0.04032999053960718,
"acc_norm": 0.3680555555555556,
"acc_norm_stderr": 0.04032999053960718
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709390974,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709390974
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5057803468208093,
"acc_stderr": 0.026917296179149116,
"acc_norm": 0.5057803468208093,
"acc_norm_stderr": 0.026917296179149116
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.49079754601226994,
"acc_stderr": 0.03927705600787443,
"acc_norm": 0.49079754601226994,
"acc_norm_stderr": 0.03927705600787443
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4691358024691358,
"acc_stderr": 0.027767689606833932,
"acc_norm": 0.4691358024691358,
"acc_norm_stderr": 0.027767689606833932
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5751295336787565,
"acc_stderr": 0.035674713352125395,
"acc_norm": 0.5751295336787565,
"acc_norm_stderr": 0.035674713352125395
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2894736842105263,
"acc_stderr": 0.04266339443159394,
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.04266339443159394
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5339449541284403,
"acc_stderr": 0.021387863350353996,
"acc_norm": 0.5339449541284403,
"acc_norm_stderr": 0.021387863350353996
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.04463112720677172,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.04463112720677172
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5098039215686274,
"acc_stderr": 0.02862441255016795,
"acc_norm": 0.5098039215686274,
"acc_norm_stderr": 0.02862441255016795
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.52,
"acc_stderr": 0.05021167315686779,
"acc_norm": 0.52,
"acc_norm_stderr": 0.05021167315686779
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6776859504132231,
"acc_stderr": 0.042664163633521685,
"acc_norm": 0.6776859504132231,
"acc_norm_stderr": 0.042664163633521685
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4342105263157895,
"acc_stderr": 0.04033565667848319,
"acc_norm": 0.4342105263157895,
"acc_norm_stderr": 0.04033565667848319
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.42483660130718953,
"acc_stderr": 0.01999797303545833,
"acc_norm": 0.42483660130718953,
"acc_norm_stderr": 0.01999797303545833
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2907801418439716,
"acc_stderr": 0.027090664368353178,
"acc_norm": 0.2907801418439716,
"acc_norm_stderr": 0.027090664368353178
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.3125,
"acc_stderr": 0.043994650575715215,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.043994650575715215
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.42592592592592593,
"acc_stderr": 0.03372343271653062,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.03372343271653062
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24916201117318434,
"acc_stderr": 0.014465893829859936,
"acc_norm": 0.24916201117318434,
"acc_norm_stderr": 0.014465893829859936
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.47058823529411764,
"acc_stderr": 0.030320243265004137,
"acc_norm": 0.47058823529411764,
"acc_norm_stderr": 0.030320243265004137
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5428571428571428,
"acc_stderr": 0.03189141832421396,
"acc_norm": 0.5428571428571428,
"acc_norm_stderr": 0.03189141832421396
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.569620253164557,
"acc_stderr": 0.03223017195937599,
"acc_norm": 0.569620253164557,
"acc_norm_stderr": 0.03223017195937599
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3468057366362451,
"acc_stderr": 0.012156071332318706,
"acc_norm": 0.3468057366362451,
"acc_norm_stderr": 0.012156071332318706
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4362745098039216,
"acc_stderr": 0.03480693138457039,
"acc_norm": 0.4362745098039216,
"acc_norm_stderr": 0.03480693138457039
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.41818181818181815,
"acc_stderr": 0.03851716319398394,
"acc_norm": 0.41818181818181815,
"acc_norm_stderr": 0.03851716319398394
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.401468788249694,
"mc1_stderr": 0.01716027390169365,
"mc2": 0.5698290730200505,
"mc2_stderr": 0.016066932404443
},
"harness|ko_commongen_v2|2": {
"acc": 0.47461629279811096,
"acc_stderr": 0.017168187201429253,
"acc_norm": 0.5064935064935064,
"acc_norm_stderr": 0.017188904359077307
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Alphacode-AI/AlphaMist7B-slr-v4_2",
"model_sha": "ec94cc7faa8fca35edf7e37a75b352fab398c805",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}