results/Alphacode-AI/Alphallama3-8B_v2/result_2024-05-09 06:09:30.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3796928327645051,
"acc_stderr": 0.014182119866974872,
"acc_norm": 0.43430034129692835,
"acc_norm_stderr": 0.014484703048857359
},
"harness|ko_hellaswag|10": {
"acc": 0.40061740689105757,
"acc_stderr": 0.004890221012015059,
"acc_norm": 0.5117506472814181,
"acc_norm_stderr": 0.004988403265931467
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.038110796698335316,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.038110796698335316
},
"harness|ko_mmlu_management|5": {
"acc": 0.46601941747572817,
"acc_stderr": 0.0493929144727348,
"acc_norm": 0.46601941747572817,
"acc_norm_stderr": 0.0493929144727348
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4329501915708812,
"acc_stderr": 0.017718469101513982,
"acc_norm": 0.4329501915708812,
"acc_norm_stderr": 0.017718469101513982
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.42962962962962964,
"acc_stderr": 0.04276349494376599,
"acc_norm": 0.42962962962962964,
"acc_norm_stderr": 0.04276349494376599
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3574468085106383,
"acc_stderr": 0.03132941789476425,
"acc_norm": 0.3574468085106383,
"acc_norm_stderr": 0.03132941789476425
},
"harness|ko_mmlu_virology|5": {
"acc": 0.35542168674698793,
"acc_stderr": 0.03726214354322415,
"acc_norm": 0.35542168674698793,
"acc_norm_stderr": 0.03726214354322415
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.43729903536977494,
"acc_stderr": 0.02817391776176288,
"acc_norm": 0.43729903536977494,
"acc_norm_stderr": 0.02817391776176288
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.36771300448430494,
"acc_stderr": 0.03236198350928276,
"acc_norm": 0.36771300448430494,
"acc_norm_stderr": 0.03236198350928276
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.40458015267175573,
"acc_stderr": 0.043046937953806645,
"acc_norm": 0.40458015267175573,
"acc_norm_stderr": 0.043046937953806645
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.46464646464646464,
"acc_stderr": 0.03553436368828063,
"acc_norm": 0.46464646464646464,
"acc_norm_stderr": 0.03553436368828063
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4689655172413793,
"acc_stderr": 0.04158632762097828,
"acc_norm": 0.4689655172413793,
"acc_norm_stderr": 0.04158632762097828
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.04389869956808778,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.04389869956808778
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3907563025210084,
"acc_stderr": 0.03169380235712997,
"acc_norm": 0.3907563025210084,
"acc_norm_stderr": 0.03169380235712997
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.36923076923076925,
"acc_stderr": 0.02446861524147891,
"acc_norm": 0.36923076923076925,
"acc_norm_stderr": 0.02446861524147891
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.047500773411999854,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.047500773411999854
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3694581280788177,
"acc_stderr": 0.03395970381998574,
"acc_norm": 0.3694581280788177,
"acc_norm_stderr": 0.03395970381998574
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4645161290322581,
"acc_stderr": 0.028372287797962952,
"acc_norm": 0.4645161290322581,
"acc_norm_stderr": 0.028372287797962952
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.5982905982905983,
"acc_stderr": 0.03211693751051621,
"acc_norm": 0.5982905982905983,
"acc_norm_stderr": 0.03211693751051621
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4075471698113208,
"acc_stderr": 0.030242233800854498,
"acc_norm": 0.4075471698113208,
"acc_norm_stderr": 0.030242233800854498
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.45454545454545453,
"acc_stderr": 0.04769300568972743,
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.04769300568972743
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.32222222222222224,
"acc_stderr": 0.028493465091028597,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.028493465091028597
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.0386155754625517,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.0386155754625517
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5074626865671642,
"acc_stderr": 0.03535140084276719,
"acc_norm": 0.5074626865671642,
"acc_norm_stderr": 0.03535140084276719
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.31213872832369943,
"acc_stderr": 0.035331333893236574,
"acc_norm": 0.31213872832369943,
"acc_norm_stderr": 0.035331333893236574
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.02351729433596329,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.02351729433596329
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2847222222222222,
"acc_stderr": 0.037738099906869355,
"acc_norm": 0.2847222222222222,
"acc_norm_stderr": 0.037738099906869355
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.3699421965317919,
"acc_stderr": 0.025992472029306397,
"acc_norm": 0.3699421965317919,
"acc_norm_stderr": 0.025992472029306397
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4233128834355828,
"acc_stderr": 0.038818912133343826,
"acc_norm": 0.4233128834355828,
"acc_norm_stderr": 0.038818912133343826
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.41975308641975306,
"acc_stderr": 0.027460099557005138,
"acc_norm": 0.41975308641975306,
"acc_norm_stderr": 0.027460099557005138
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.38860103626943004,
"acc_stderr": 0.03517739796373132,
"acc_norm": 0.38860103626943004,
"acc_norm_stderr": 0.03517739796373132
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.30701754385964913,
"acc_stderr": 0.04339138322579859,
"acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.04339138322579859
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.43853211009174314,
"acc_stderr": 0.02127471307395457,
"acc_norm": 0.43853211009174314,
"acc_norm_stderr": 0.02127471307395457
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.24603174603174602,
"acc_stderr": 0.03852273364924315,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.03852273364924315
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.02845263998508801,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.02845263998508801
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.04369236326573981,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.04369236326573981
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4276315789473684,
"acc_stderr": 0.040260970832965585,
"acc_norm": 0.4276315789473684,
"acc_norm_stderr": 0.040260970832965585
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.32516339869281047,
"acc_stderr": 0.01895088677080631,
"acc_norm": 0.32516339869281047,
"acc_norm_stderr": 0.01895088677080631
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2624113475177305,
"acc_stderr": 0.026244920349843014,
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.026244920349843014
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25892857142857145,
"acc_stderr": 0.041577515398656284,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.041577515398656284
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.23148148148148148,
"acc_stderr": 0.028765111718046965,
"acc_norm": 0.23148148148148148,
"acc_norm_stderr": 0.028765111718046965
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574894,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574894
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.2757352941176471,
"acc_stderr": 0.027146271936625166,
"acc_norm": 0.2757352941176471,
"acc_norm_stderr": 0.027146271936625166
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2530612244897959,
"acc_stderr": 0.027833023871399666,
"acc_norm": 0.2530612244897959,
"acc_norm_stderr": 0.027833023871399666
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.4936708860759494,
"acc_stderr": 0.032544620107678585,
"acc_norm": 0.4936708860759494,
"acc_norm_stderr": 0.032544620107678585
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2790091264667536,
"acc_stderr": 0.011455208832803538,
"acc_norm": 0.2790091264667536,
"acc_norm_stderr": 0.011455208832803538
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.3627450980392157,
"acc_stderr": 0.03374499356319355,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.03374499356319355
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3939393939393939,
"acc_stderr": 0.0381549430868893,
"acc_norm": 0.3939393939393939,
"acc_norm_stderr": 0.0381549430868893
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3108935128518972,
"mc1_stderr": 0.01620331667355969,
"mc2": 0.4613430780367225,
"mc2_stderr": 0.01569291605244769
},
"harness|ko_commongen_v2|2": {
"acc": 0.3600944510035419,
"acc_stderr": 0.01650368672044007,
"acc_norm": 0.4769775678866588,
"acc_norm_stderr": 0.01717212154672764
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Alphacode-AI/Alphallama3-8B_v2",
"model_sha": "33782439baaab0d356d31e007874b2e4e96c13eb",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
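
The JSON above is the raw output of the evaluation run and ships with no tooling of its own. As a minimal sketch only, assuming the file is saved locally under the hypothetical filename used below, the per-category ko_mmlu scores can be aggregated like this in Python:

import json
from statistics import mean

# Hypothetical local filename; point this at wherever the JSON above is stored.
with open("result_2024-05-09 06:09:30.json", encoding="utf-8") as f:
    report = json.load(f)

results = report["results"]

# Collect acc_norm over the ko_mmlu subtasks. Every entry in "results" exposes
# acc/acc_norm except ko_truthfulqa_mc, which reports mc1/mc2 instead.
mmlu_acc_norm = [
    scores["acc_norm"]
    for task, scores in results.items()
    if task.startswith("harness|ko_mmlu_")
]
print(f"{len(mmlu_acc_norm)} ko_mmlu subtasks, mean acc_norm = {mean(mmlu_acc_norm):.4f}")

The unweighted mean here is purely illustrative; the leaderboard's own aggregation may combine subtasks differently.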