{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.29180887372013653,
"acc_stderr": 0.013284525292403506,
"acc_norm": 0.3430034129692833,
"acc_norm_stderr": 0.013872423223718169
},
"harness|ko_hellaswag|10": {
"acc": 0.32822146982672773,
"acc_stderr": 0.004686062421158143,
"acc_norm": 0.4041027683728341,
"acc_norm_stderr": 0.004897146690596263
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5029239766081871,
"acc_stderr": 0.03834759370936839,
"acc_norm": 0.5029239766081871,
"acc_norm_stderr": 0.03834759370936839
},
"harness|ko_mmlu_management|5": {
"acc": 0.5728155339805825,
"acc_stderr": 0.04897957737781168,
"acc_norm": 0.5728155339805825,
"acc_norm_stderr": 0.04897957737781168
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.44189016602809705,
"acc_stderr": 0.01775880053421441,
"acc_norm": 0.44189016602809705,
"acc_norm_stderr": 0.01775880053421441
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.03820169914517905,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.03820169914517905
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4297872340425532,
"acc_stderr": 0.03236214467715564,
"acc_norm": 0.4297872340425532,
"acc_norm_stderr": 0.03236214467715564
},
"harness|ko_mmlu_virology|5": {
"acc": 0.43373493975903615,
"acc_stderr": 0.03858158940685515,
"acc_norm": 0.43373493975903615,
"acc_norm_stderr": 0.03858158940685515
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4662379421221865,
"acc_stderr": 0.028333277109562783,
"acc_norm": 0.4662379421221865,
"acc_norm_stderr": 0.028333277109562783
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.43946188340807174,
"acc_stderr": 0.03331092511038179,
"acc_norm": 0.43946188340807174,
"acc_norm_stderr": 0.03331092511038179
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5114503816793893,
"acc_stderr": 0.04384140024078016,
"acc_norm": 0.5114503816793893,
"acc_norm_stderr": 0.04384140024078016
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956913,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956913
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5656565656565656,
"acc_stderr": 0.03531505879359183,
"acc_norm": 0.5656565656565656,
"acc_norm_stderr": 0.03531505879359183
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5310344827586206,
"acc_stderr": 0.04158632762097828,
"acc_norm": 0.5310344827586206,
"acc_norm_stderr": 0.04158632762097828
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.04617034827006717,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.04617034827006717
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5378151260504201,
"acc_stderr": 0.0323854694875898,
"acc_norm": 0.5378151260504201,
"acc_norm_stderr": 0.0323854694875898
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4794871794871795,
"acc_stderr": 0.025329663163489943,
"acc_norm": 0.4794871794871795,
"acc_norm_stderr": 0.025329663163489943
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6111111111111112,
"acc_stderr": 0.04712821257426769,
"acc_norm": 0.6111111111111112,
"acc_norm_stderr": 0.04712821257426769
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.03481904844438803,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.03481904844438803
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.43870967741935485,
"acc_stderr": 0.028229497320317223,
"acc_norm": 0.43870967741935485,
"acc_norm_stderr": 0.028229497320317223
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7435897435897436,
"acc_stderr": 0.028605953702004243,
"acc_norm": 0.7435897435897436,
"acc_norm_stderr": 0.028605953702004243
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4679245283018868,
"acc_stderr": 0.030709486992556545,
"acc_norm": 0.4679245283018868,
"acc_norm_stderr": 0.030709486992556545
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.45454545454545453,
"acc_stderr": 0.04769300568972743,
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.04769300568972743
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.37777777777777777,
"acc_stderr": 0.029560707392465715,
"acc_norm": 0.37777777777777777,
"acc_norm_stderr": 0.029560707392465715
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2980132450331126,
"acc_stderr": 0.037345356767871984,
"acc_norm": 0.2980132450331126,
"acc_norm_stderr": 0.037345356767871984
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5621890547263682,
"acc_stderr": 0.035080801121998406,
"acc_norm": 0.5621890547263682,
"acc_norm_stderr": 0.035080801121998406
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4277456647398844,
"acc_stderr": 0.037724468575180276,
"acc_norm": 0.4277456647398844,
"acc_norm_stderr": 0.037724468575180276
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.49206349206349204,
"acc_stderr": 0.025748065871673286,
"acc_norm": 0.49206349206349204,
"acc_norm_stderr": 0.025748065871673286
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3263888888888889,
"acc_stderr": 0.03921067198982266,
"acc_norm": 0.3263888888888889,
"acc_norm_stderr": 0.03921067198982266
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.47109826589595377,
"acc_stderr": 0.02687408588351835,
"acc_norm": 0.47109826589595377,
"acc_norm_stderr": 0.02687408588351835
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.44785276073619634,
"acc_stderr": 0.03906947479456601,
"acc_norm": 0.44785276073619634,
"acc_norm_stderr": 0.03906947479456601
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4845679012345679,
"acc_stderr": 0.02780749004427621,
"acc_norm": 0.4845679012345679,
"acc_norm_stderr": 0.02780749004427621
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.47150259067357514,
"acc_stderr": 0.03602573571288441,
"acc_norm": 0.47150259067357514,
"acc_norm_stderr": 0.03602573571288441
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.37719298245614036,
"acc_stderr": 0.04559522141958216,
"acc_norm": 0.37719298245614036,
"acc_norm_stderr": 0.04559522141958216
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5009174311926605,
"acc_stderr": 0.021437287056051215,
"acc_norm": 0.5009174311926605,
"acc_norm_stderr": 0.021437287056051215
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.4523809523809524,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.4523809523809524,
"acc_norm_stderr": 0.044518079590553275
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4869281045751634,
"acc_stderr": 0.028620130800700246,
"acc_norm": 0.4869281045751634,
"acc_norm_stderr": 0.028620130800700246
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.628099173553719,
"acc_stderr": 0.04412015806624504,
"acc_norm": 0.628099173553719,
"acc_norm_stderr": 0.04412015806624504
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4144736842105263,
"acc_stderr": 0.04008973785779207,
"acc_norm": 0.4144736842105263,
"acc_norm_stderr": 0.04008973785779207
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3839869281045752,
"acc_stderr": 0.019675808135281532,
"acc_norm": 0.3839869281045752,
"acc_norm_stderr": 0.019675808135281532
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3900709219858156,
"acc_stderr": 0.029097675599463926,
"acc_norm": 0.3900709219858156,
"acc_norm_stderr": 0.029097675599463926
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.04718471485219588,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.04718471485219588
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.49537037037037035,
"acc_stderr": 0.03409825519163572,
"acc_norm": 0.49537037037037035,
"acc_norm_stderr": 0.03409825519163572
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2547486033519553,
"acc_stderr": 0.014572650383409153,
"acc_norm": 0.2547486033519553,
"acc_norm_stderr": 0.014572650383409153
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.35661764705882354,
"acc_stderr": 0.029097209568411952,
"acc_norm": 0.35661764705882354,
"acc_norm_stderr": 0.029097209568411952
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5673469387755102,
"acc_stderr": 0.031717528240626645,
"acc_norm": 0.5673469387755102,
"acc_norm_stderr": 0.031717528240626645
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5021097046413502,
"acc_stderr": 0.032546938018020076,
"acc_norm": 0.5021097046413502,
"acc_norm_stderr": 0.032546938018020076
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.33833116036505867,
"acc_stderr": 0.01208426562634422,
"acc_norm": 0.33833116036505867,
"acc_norm_stderr": 0.01208426562634422
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.0345423658538061,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.0345423658538061
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3575757575757576,
"acc_stderr": 0.037425970438065864,
"acc_norm": 0.3575757575757576,
"acc_norm_stderr": 0.037425970438065864
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.29008567931456547,
"mc1_stderr": 0.01588623687420952,
"mc2": 0.47399915157940936,
"mc2_stderr": 0.015777434106257295
},
"harness|ko_commongen_v2|2": {
"acc": 0.4805194805194805,
"acc_stderr": 0.01717730199234255,
"acc_norm": 0.5053128689492326,
"acc_norm_stderr": 0.01718938362722971
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "01-ai/Yi-9B",
"model_sha": "95b8e272566167182ef1c53657a97d87a4084c9e",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}