results/Alphacode-AI/Alphallama3-8B/result_2024-05-02 12:00:21.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.36945392491467577,
"acc_stderr": 0.014104578366491894,
"acc_norm": 0.4180887372013652,
"acc_norm_stderr": 0.014413988396996083
},
"harness|ko_hellaswag|10": {
"acc": 0.38836885082652856,
"acc_stderr": 0.00486383136484808,
"acc_norm": 0.4993029277036447,
"acc_norm_stderr": 0.00498977656227611
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.38596491228070173,
"acc_stderr": 0.03733756969066165,
"acc_norm": 0.38596491228070173,
"acc_norm_stderr": 0.03733756969066165
},
"harness|ko_mmlu_management|5": {
"acc": 0.2815533980582524,
"acc_stderr": 0.04453254836326466,
"acc_norm": 0.2815533980582524,
"acc_norm_stderr": 0.04453254836326466
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.40485312899106,
"acc_stderr": 0.01755324646772025,
"acc_norm": 0.40485312899106,
"acc_norm_stderr": 0.01755324646772025
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.42962962962962964,
"acc_stderr": 0.04276349494376599,
"acc_norm": 0.42962962962962964,
"acc_norm_stderr": 0.04276349494376599
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.37446808510638296,
"acc_stderr": 0.031639106653672915,
"acc_norm": 0.37446808510638296,
"acc_norm_stderr": 0.031639106653672915
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3614457831325301,
"acc_stderr": 0.0374005938202932,
"acc_norm": 0.3614457831325301,
"acc_norm_stderr": 0.0374005938202932
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.40514469453376206,
"acc_stderr": 0.02788238379132595,
"acc_norm": 0.40514469453376206,
"acc_norm_stderr": 0.02788238379132595
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.2914798206278027,
"acc_stderr": 0.030500283176545916,
"acc_norm": 0.2914798206278027,
"acc_norm_stderr": 0.030500283176545916
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.3053435114503817,
"acc_stderr": 0.04039314978724561,
"acc_norm": 0.3053435114503817,
"acc_norm_stderr": 0.04039314978724561
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.3787878787878788,
"acc_stderr": 0.03456088731993747,
"acc_norm": 0.3787878787878788,
"acc_norm_stderr": 0.03456088731993747
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4413793103448276,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.4413793103448276,
"acc_norm_stderr": 0.04137931034482758
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.04280105837364397,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.04280105837364397
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3403361344537815,
"acc_stderr": 0.030778057422931673,
"acc_norm": 0.3403361344537815,
"acc_norm_stderr": 0.030778057422931673
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.358974358974359,
"acc_stderr": 0.02432173848460235,
"acc_norm": 0.358974358974359,
"acc_norm_stderr": 0.02432173848460235
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.3055555555555556,
"acc_stderr": 0.044531975073749834,
"acc_norm": 0.3055555555555556,
"acc_norm_stderr": 0.044531975073749834
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.35960591133004927,
"acc_stderr": 0.033764582465095665,
"acc_norm": 0.35960591133004927,
"acc_norm_stderr": 0.033764582465095665
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4096774193548387,
"acc_stderr": 0.027976054915347357,
"acc_norm": 0.4096774193548387,
"acc_norm_stderr": 0.027976054915347357
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.5128205128205128,
"acc_stderr": 0.0327453193884235,
"acc_norm": 0.5128205128205128,
"acc_norm_stderr": 0.0327453193884235
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3584905660377358,
"acc_stderr": 0.029514703583981755,
"acc_norm": 0.3584905660377358,
"acc_norm_stderr": 0.029514703583981755
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.38181818181818183,
"acc_stderr": 0.04653429807913509,
"acc_norm": 0.38181818181818183,
"acc_norm_stderr": 0.04653429807913509
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.02803792996911499,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.02803792996911499
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526733,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526733
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5124378109452736,
"acc_stderr": 0.0353443984853958,
"acc_norm": 0.5124378109452736,
"acc_norm_stderr": 0.0353443984853958
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.28901734104046245,
"acc_stderr": 0.034564257450869995,
"acc_norm": 0.28901734104046245,
"acc_norm_stderr": 0.034564257450869995
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.28835978835978837,
"acc_stderr": 0.0233306540545359,
"acc_norm": 0.28835978835978837,
"acc_norm_stderr": 0.0233306540545359
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3125,
"acc_stderr": 0.038760854559127644,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.038760854559127644
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.33815028901734107,
"acc_stderr": 0.025469770149400175,
"acc_norm": 0.33815028901734107,
"acc_norm_stderr": 0.025469770149400175
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.31901840490797545,
"acc_stderr": 0.03661997551073836,
"acc_norm": 0.31901840490797545,
"acc_norm_stderr": 0.03661997551073836
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.38271604938271603,
"acc_stderr": 0.027044538138402616,
"acc_norm": 0.38271604938271603,
"acc_norm_stderr": 0.027044538138402616
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.34196891191709844,
"acc_stderr": 0.03423465100104282,
"acc_norm": 0.34196891191709844,
"acc_norm_stderr": 0.03423465100104282
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.043036840335373173,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.043036840335373173
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3541284403669725,
"acc_stderr": 0.02050472901382911,
"acc_norm": 0.3541284403669725,
"acc_norm_stderr": 0.02050472901382911
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.23809523809523808,
"acc_stderr": 0.03809523809523809,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.03809523809523809
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4084967320261438,
"acc_stderr": 0.028146405993096358,
"acc_norm": 0.4084967320261438,
"acc_norm_stderr": 0.028146405993096358
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5289256198347108,
"acc_stderr": 0.04556710331269498,
"acc_norm": 0.5289256198347108,
"acc_norm_stderr": 0.04556710331269498
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3684210526315789,
"acc_stderr": 0.03925523381052932,
"acc_norm": 0.3684210526315789,
"acc_norm_stderr": 0.03925523381052932
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3284313725490196,
"acc_stderr": 0.018999707383162673,
"acc_norm": 0.3284313725490196,
"acc_norm_stderr": 0.018999707383162673
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2765957446808511,
"acc_stderr": 0.026684564340460994,
"acc_norm": 0.2765957446808511,
"acc_norm_stderr": 0.026684564340460994
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.36607142857142855,
"acc_stderr": 0.0457237235873743,
"acc_norm": 0.36607142857142855,
"acc_norm_stderr": 0.0457237235873743
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.22685185185185186,
"acc_stderr": 0.028561650102422276,
"acc_norm": 0.22685185185185186,
"acc_norm_stderr": 0.028561650102422276
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.27262569832402234,
"acc_stderr": 0.014893391735249608,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249608
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.2867647058823529,
"acc_stderr": 0.02747227447323382,
"acc_norm": 0.2867647058823529,
"acc_norm_stderr": 0.02747227447323382
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.24489795918367346,
"acc_stderr": 0.02752963744017493,
"acc_norm": 0.24489795918367346,
"acc_norm_stderr": 0.02752963744017493
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.4345991561181435,
"acc_stderr": 0.03226759995510145,
"acc_norm": 0.4345991561181435,
"acc_norm_stderr": 0.03226759995510145
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.28226857887874834,
"acc_stderr": 0.011495852176241947,
"acc_norm": 0.28226857887874834,
"acc_norm_stderr": 0.011495852176241947
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.29901960784313725,
"acc_stderr": 0.03213325717373617,
"acc_norm": 0.29901960784313725,
"acc_norm_stderr": 0.03213325717373617
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3696969696969697,
"acc_stderr": 0.03769430314512568,
"acc_norm": 0.3696969696969697,
"acc_norm_stderr": 0.03769430314512568
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2631578947368421,
"mc1_stderr": 0.015415241740237017,
"mc2": 0.4188327081335226,
"mc2_stderr": 0.015492466155742542
},
"harness|ko_commongen_v2|2": {
"acc": 0.3364817001180638,
"acc_stderr": 0.016245085294386546,
"acc_norm": 0.4805194805194805,
"acc_norm_stderr": 0.017177301992342547
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Alphacode-AI/Alphallama3-8B",
"model_sha": "a202fe10779c9936312b61e504bc2b76dac063dc",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}