{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2354948805460751,
"acc_stderr": 0.012399451855004748,
"acc_norm": 0.2713310580204778,
"acc_norm_stderr": 0.012993807727545792
},
"harness|ko_hellaswag|10": {
"acc": 0.28649671380203146,
"acc_stderr": 0.004512002459757949,
"acc_norm": 0.3359888468432583,
"acc_norm_stderr": 0.004713696694131676
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4502923976608187,
"acc_stderr": 0.038158273659132366,
"acc_norm": 0.4502923976608187,
"acc_norm_stderr": 0.038158273659132366
},
"harness|ko_mmlu_management|5": {
"acc": 0.27184466019417475,
"acc_stderr": 0.044052680241409216,
"acc_norm": 0.27184466019417475,
"acc_norm_stderr": 0.044052680241409216
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3269476372924649,
"acc_stderr": 0.016774908180131463,
"acc_norm": 0.3269476372924649,
"acc_norm_stderr": 0.016774908180131463
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.362962962962963,
"acc_stderr": 0.041539484047424,
"acc_norm": 0.362962962962963,
"acc_norm_stderr": 0.041539484047424
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2851063829787234,
"acc_stderr": 0.02951319662553935,
"acc_norm": 0.2851063829787234,
"acc_norm_stderr": 0.02951319662553935
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3192771084337349,
"acc_stderr": 0.03629335329947859,
"acc_norm": 0.3192771084337349,
"acc_norm_stderr": 0.03629335329947859
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3311897106109325,
"acc_stderr": 0.026730620728004917,
"acc_norm": 0.3311897106109325,
"acc_norm_stderr": 0.026730620728004917
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3452914798206278,
"acc_stderr": 0.03191100192835794,
"acc_norm": 0.3452914798206278,
"acc_norm_stderr": 0.03191100192835794
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.24427480916030533,
"acc_stderr": 0.03768335959728743,
"acc_norm": 0.24427480916030533,
"acc_norm_stderr": 0.03768335959728743
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.29292929292929293,
"acc_stderr": 0.032424979581788166,
"acc_norm": 0.29292929292929293,
"acc_norm_stderr": 0.032424979581788166
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3724137931034483,
"acc_stderr": 0.0402873153294756,
"acc_norm": 0.3724137931034483,
"acc_norm_stderr": 0.0402873153294756
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.16666666666666666,
"acc_stderr": 0.03708284662416543,
"acc_norm": 0.16666666666666666,
"acc_norm_stderr": 0.03708284662416543
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.28991596638655465,
"acc_stderr": 0.029472485833136084,
"acc_norm": 0.28991596638655465,
"acc_norm_stderr": 0.029472485833136084
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2743589743589744,
"acc_stderr": 0.022622765767493214,
"acc_norm": 0.2743589743589744,
"acc_norm_stderr": 0.022622765767493214
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.04793724854411019,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411019
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.4166666666666667,
"acc_stderr": 0.04766075165356461,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.04766075165356461
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3448275862068966,
"acc_stderr": 0.03344283744280458,
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03344283744280458
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3709677419354839,
"acc_stderr": 0.02748054188795359,
"acc_norm": 0.3709677419354839,
"acc_norm_stderr": 0.02748054188795359
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.41452991452991456,
"acc_stderr": 0.03227396567623778,
"acc_norm": 0.41452991452991456,
"acc_norm_stderr": 0.03227396567623778
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3132075471698113,
"acc_stderr": 0.02854479331905533,
"acc_norm": 0.3132075471698113,
"acc_norm_stderr": 0.02854479331905533
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.34545454545454546,
"acc_stderr": 0.04554619617541053,
"acc_norm": 0.34545454545454546,
"acc_norm_stderr": 0.04554619617541053
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2814814814814815,
"acc_stderr": 0.027420019350945287,
"acc_norm": 0.2814814814814815,
"acc_norm_stderr": 0.027420019350945287
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2781456953642384,
"acc_stderr": 0.03658603262763743,
"acc_norm": 0.2781456953642384,
"acc_norm_stderr": 0.03658603262763743
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.3482587064676617,
"acc_stderr": 0.033687874661154596,
"acc_norm": 0.3482587064676617,
"acc_norm_stderr": 0.033687874661154596
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.24277456647398843,
"acc_stderr": 0.0326926380614177,
"acc_norm": 0.24277456647398843,
"acc_norm_stderr": 0.0326926380614177
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.26455026455026454,
"acc_stderr": 0.022717467897708607,
"acc_norm": 0.26455026455026454,
"acc_norm_stderr": 0.022717467897708607
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.037455547914624576,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.037455547914624576
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036845,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.48,
"acc_stderr": 0.05021167315686779,
"acc_norm": 0.48,
"acc_norm_stderr": 0.05021167315686779
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.37572254335260113,
"acc_stderr": 0.02607431485165708,
"acc_norm": 0.37572254335260113,
"acc_norm_stderr": 0.02607431485165708
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3128834355828221,
"acc_stderr": 0.036429145782924034,
"acc_norm": 0.3128834355828221,
"acc_norm_stderr": 0.036429145782924034
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3117283950617284,
"acc_stderr": 0.02577311116963045,
"acc_norm": 0.3117283950617284,
"acc_norm_stderr": 0.02577311116963045
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.30569948186528495,
"acc_stderr": 0.033248379397581594,
"acc_norm": 0.30569948186528495,
"acc_norm_stderr": 0.033248379397581594
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.039994238792813365,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.039994238792813365
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.24587155963302754,
"acc_stderr": 0.018461940968708457,
"acc_norm": 0.24587155963302754,
"acc_norm_stderr": 0.018461940968708457
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.1984126984126984,
"acc_stderr": 0.03567016675276863,
"acc_norm": 0.1984126984126984,
"acc_norm_stderr": 0.03567016675276863
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.30392156862745096,
"acc_stderr": 0.026336613469046637,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.026336613469046637
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5041322314049587,
"acc_stderr": 0.04564198767432754,
"acc_norm": 0.5041322314049587,
"acc_norm_stderr": 0.04564198767432754
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3026315789473684,
"acc_stderr": 0.03738520676119668,
"acc_norm": 0.3026315789473684,
"acc_norm_stderr": 0.03738520676119668
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.29411764705882354,
"acc_stderr": 0.018433427649401903,
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.018433427649401903
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.29432624113475175,
"acc_stderr": 0.0271871270115038,
"acc_norm": 0.29432624113475175,
"acc_norm_stderr": 0.0271871270115038
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.04547960999764376,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04547960999764376
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.35185185185185186,
"acc_stderr": 0.03256850570293649,
"acc_norm": 0.35185185185185186,
"acc_norm_stderr": 0.03256850570293649
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.18382352941176472,
"acc_stderr": 0.023529242185193106,
"acc_norm": 0.18382352941176472,
"acc_norm_stderr": 0.023529242185193106
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2693877551020408,
"acc_stderr": 0.02840125202902294,
"acc_norm": 0.2693877551020408,
"acc_norm_stderr": 0.02840125202902294
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2911392405063291,
"acc_stderr": 0.029571601065753374,
"acc_norm": 0.2911392405063291,
"acc_norm_stderr": 0.029571601065753374
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.26597131681877445,
"acc_stderr": 0.011285033165551286,
"acc_norm": 0.26597131681877445,
"acc_norm_stderr": 0.011285033165551286
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.03019028245350195,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.03019028245350195
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3212121212121212,
"acc_stderr": 0.0364620496325381,
"acc_norm": 0.3212121212121212,
"acc_norm_stderr": 0.0364620496325381
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2631578947368421,
"mc1_stderr": 0.015415241740237038,
"mc2": 0.4626002465688359,
"mc2_stderr": 0.016132004385948653
},
"harness|ko_commongen_v2|2": {
"acc": 0.23376623376623376,
"acc_stderr": 0.01455078258710312,
"acc_norm": 0.3234946871310508,
"acc_norm_stderr": 0.016083627290483675
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Korabbit/llama-2-ko-7b",
"model_sha": "781e10378a374e3d2ecc7eaa71ffad5d912cc040",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}