results/Junmai/KIT-7B-v3/result_2023-11-09 02:14:48.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.21416382252559726,
"acc_stderr": 0.011988383205966494,
"acc_norm": 0.26535836177474403,
"acc_norm_stderr": 0.012902554762313964
},
"harness|ko_hellaswag|10": {
"acc": 0.253734315873332,
"acc_stderr": 0.004342580277662732,
"acc_norm": 0.2401911969727146,
"acc_norm_stderr": 0.004263263933601555
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.2631578947368421,
"acc_stderr": 0.03377310252209194,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.03377310252209194
},
"harness|ko_mmlu_management|5": {
"acc": 0.3300970873786408,
"acc_stderr": 0.046561471100123514,
"acc_norm": 0.3300970873786408,
"acc_norm_stderr": 0.046561471100123514
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.227330779054917,
"acc_stderr": 0.014987270640946015,
"acc_norm": 0.227330779054917,
"acc_norm_stderr": 0.014987270640946015
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2518518518518518,
"acc_stderr": 0.03749850709174023,
"acc_norm": 0.2518518518518518,
"acc_norm_stderr": 0.03749850709174023
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.1829787234042553,
"acc_stderr": 0.02527604100044997,
"acc_norm": 0.1829787234042553,
"acc_norm_stderr": 0.02527604100044997
},
"harness|ko_mmlu_virology|5": {
"acc": 0.15060240963855423,
"acc_stderr": 0.02784386378726433,
"acc_norm": 0.15060240963855423,
"acc_norm_stderr": 0.02784386378726433
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.2508038585209003,
"acc_stderr": 0.024619771956697165,
"acc_norm": 0.2508038585209003,
"acc_norm_stderr": 0.024619771956697165
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.16591928251121077,
"acc_stderr": 0.02496755319654716,
"acc_norm": 0.16591928251121077,
"acc_norm_stderr": 0.02496755319654716
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2595419847328244,
"acc_stderr": 0.0384487613978527,
"acc_norm": 0.2595419847328244,
"acc_norm_stderr": 0.0384487613978527
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036843,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036843
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.3434343434343434,
"acc_stderr": 0.03383201223244442,
"acc_norm": 0.3434343434343434,
"acc_norm_stderr": 0.03383201223244442
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2206896551724138,
"acc_stderr": 0.03455930201924812,
"acc_norm": 0.2206896551724138,
"acc_norm_stderr": 0.03455930201924812
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.04488482852329017
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.2815126050420168,
"acc_stderr": 0.029213549414372177,
"acc_norm": 0.2815126050420168,
"acc_norm_stderr": 0.029213549414372177
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2948717948717949,
"acc_stderr": 0.0231193627582323,
"acc_norm": 0.2948717948717949,
"acc_norm_stderr": 0.0231193627582323
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036845,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036846,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036846
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.041331194402438376,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.041331194402438376
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.28078817733990147,
"acc_stderr": 0.031618563353586086,
"acc_norm": 0.28078817733990147,
"acc_norm_stderr": 0.031618563353586086
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2967741935483871,
"acc_stderr": 0.025988500792411898,
"acc_norm": 0.2967741935483871,
"acc_norm_stderr": 0.025988500792411898
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.20512820512820512,
"acc_stderr": 0.026453508054040335,
"acc_norm": 0.20512820512820512,
"acc_norm_stderr": 0.026453508054040335
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.27547169811320754,
"acc_stderr": 0.027495663683724064,
"acc_norm": 0.27547169811320754,
"acc_norm_stderr": 0.027495663683724064
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.24545454545454545,
"acc_stderr": 0.04122066502878284,
"acc_norm": 0.24545454545454545,
"acc_norm_stderr": 0.04122066502878284
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.27037037037037037,
"acc_stderr": 0.027080372815145654,
"acc_norm": 0.27037037037037037,
"acc_norm_stderr": 0.027080372815145654
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.33112582781456956,
"acc_stderr": 0.038425817186598696,
"acc_norm": 0.33112582781456956,
"acc_norm_stderr": 0.038425817186598696
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.19900497512437812,
"acc_stderr": 0.02823136509275841,
"acc_norm": 0.19900497512437812,
"acc_norm_stderr": 0.02823136509275841
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.31213872832369943,
"acc_stderr": 0.035331333893236574,
"acc_norm": 0.31213872832369943,
"acc_norm_stderr": 0.035331333893236574
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2671957671957672,
"acc_stderr": 0.022789673145776575,
"acc_norm": 0.2671957671957672,
"acc_norm_stderr": 0.022789673145776575
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2569444444444444,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.2569444444444444,
"acc_norm_stderr": 0.03653946969442099
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847415,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847415
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2514450867052023,
"acc_stderr": 0.02335736578587404,
"acc_norm": 0.2514450867052023,
"acc_norm_stderr": 0.02335736578587404
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.26380368098159507,
"acc_stderr": 0.03462419931615624,
"acc_norm": 0.26380368098159507,
"acc_norm_stderr": 0.03462419931615624
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.02378858355165854,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.02378858355165854
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.36787564766839376,
"acc_stderr": 0.034801756684660366,
"acc_norm": 0.36787564766839376,
"acc_norm_stderr": 0.034801756684660366
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03999423879281336,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03999423879281336
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.30458715596330277,
"acc_stderr": 0.019732299420354038,
"acc_norm": 0.30458715596330277,
"acc_norm_stderr": 0.019732299420354038
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303316,
"acc_norm": 0.30952380952380953,
"acc_norm_stderr": 0.04134913018303316
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2679738562091503,
"acc_stderr": 0.025360603796242557,
"acc_norm": 0.2679738562091503,
"acc_norm_stderr": 0.025360603796242557
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.17,
"acc_stderr": 0.03775251680686371,
"acc_norm": 0.17,
"acc_norm_stderr": 0.03775251680686371
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.19008264462809918,
"acc_stderr": 0.035817969517092825,
"acc_norm": 0.19008264462809918,
"acc_norm_stderr": 0.035817969517092825
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3223684210526316,
"acc_stderr": 0.038035102483515854,
"acc_norm": 0.3223684210526316,
"acc_norm_stderr": 0.038035102483515854
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.24836601307189543,
"acc_stderr": 0.01747948700136476,
"acc_norm": 0.24836601307189543,
"acc_norm_stderr": 0.01747948700136476
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2765957446808511,
"acc_stderr": 0.026684564340461004,
"acc_norm": 0.2765957446808511,
"acc_norm_stderr": 0.026684564340461004
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.15178571428571427,
"acc_stderr": 0.03405702838185693,
"acc_norm": 0.15178571428571427,
"acc_norm_stderr": 0.03405702838185693
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.33796296296296297,
"acc_stderr": 0.03225941352631296,
"acc_norm": 0.33796296296296297,
"acc_norm_stderr": 0.03225941352631296
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2681564245810056,
"acc_stderr": 0.014816119635317003,
"acc_norm": 0.2681564245810056,
"acc_norm_stderr": 0.014816119635317003
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.22,
"acc_stderr": 0.041633319989322674,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322674
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.26838235294117646,
"acc_stderr": 0.02691748122437722,
"acc_norm": 0.26838235294117646,
"acc_norm_stderr": 0.02691748122437722
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2897959183673469,
"acc_stderr": 0.029043088683304328,
"acc_norm": 0.2897959183673469,
"acc_norm_stderr": 0.029043088683304328
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2489451476793249,
"acc_stderr": 0.028146970599422644,
"acc_norm": 0.2489451476793249,
"acc_norm_stderr": 0.028146970599422644
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.24902216427640156,
"acc_stderr": 0.01104489226404077,
"acc_norm": 0.24902216427640156,
"acc_norm_stderr": 0.01104489226404077
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.29901960784313725,
"acc_stderr": 0.03213325717373617,
"acc_norm": 0.29901960784313725,
"acc_norm_stderr": 0.03213325717373617
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2909090909090909,
"acc_stderr": 0.035465630196243346,
"acc_norm": 0.2909090909090909,
"acc_norm_stderr": 0.035465630196243346
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.23745410036719705,
"mc1_stderr": 0.01489627744104183,
"mc2": NaN,
"mc2_stderr": NaN
},
"harness|ko_commongen_v2|2": {
"acc": 0.0885478158205431,
"acc_stderr": 0.00976721370275642,
"acc_norm": 0.19952774498229045,
"acc_norm_stderr": 0.013740090947621325
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Junmai/KIT-7B-v3",
"model_sha": "17167805a31f62fa72d3a5c4dc2abf7201a3395d",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}