results/krevas/LDCC-Instruct-Llama-2-ko-13B-v3/result_2023-10-12 23:49:38.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.37627986348122866,
"acc_stderr": 0.014157022555407161,
"acc_norm": 0.44880546075085326,
"acc_norm_stderr": 0.014534599585097672
},
"harness|ko_hellaswag|10": {
"acc": 0.4080860386377216,
"acc_stderr": 0.00490474775228696,
"acc_norm": 0.5417247560246963,
"acc_norm_stderr": 0.004972377085916327
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5321637426900585,
"acc_stderr": 0.03826882417660369,
"acc_norm": 0.5321637426900585,
"acc_norm_stderr": 0.03826882417660369
},
"harness|ko_mmlu_management|5": {
"acc": 0.34951456310679613,
"acc_stderr": 0.04721188506097172,
"acc_norm": 0.34951456310679613,
"acc_norm_stderr": 0.04721188506097172
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.454661558109834,
"acc_stderr": 0.017806304585052606,
"acc_norm": 0.454661558109834,
"acc_norm_stderr": 0.017806304585052606
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.039725528847851375,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.039725528847851375
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3021276595744681,
"acc_stderr": 0.03001755447188055,
"acc_norm": 0.3021276595744681,
"acc_norm_stderr": 0.03001755447188055
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3433734939759036,
"acc_stderr": 0.03696584317010601,
"acc_norm": 0.3433734939759036,
"acc_norm_stderr": 0.03696584317010601
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.40192926045016075,
"acc_stderr": 0.027846476005930477,
"acc_norm": 0.40192926045016075,
"acc_norm_stderr": 0.027846476005930477
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.45739910313901344,
"acc_stderr": 0.033435777055830646,
"acc_norm": 0.45739910313901344,
"acc_norm_stderr": 0.033435777055830646
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.366412213740458,
"acc_stderr": 0.042258754519696386,
"acc_norm": 0.366412213740458,
"acc_norm_stderr": 0.042258754519696386
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.3484848484848485,
"acc_stderr": 0.033948539651564025,
"acc_norm": 0.3484848484848485,
"acc_norm_stderr": 0.033948539651564025
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.32413793103448274,
"acc_stderr": 0.03900432069185553,
"acc_norm": 0.32413793103448274,
"acc_norm_stderr": 0.03900432069185553
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.20588235294117646,
"acc_stderr": 0.04023382273617747,
"acc_norm": 0.20588235294117646,
"acc_norm_stderr": 0.04023382273617747
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.31092436974789917,
"acc_stderr": 0.03006676158297792,
"acc_norm": 0.31092436974789917,
"acc_norm_stderr": 0.03006676158297792
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.35384615384615387,
"acc_stderr": 0.02424378399406217,
"acc_norm": 0.35384615384615387,
"acc_norm_stderr": 0.02424378399406217
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.04803752235190192,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.04803752235190192
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.030108330718011625,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.030108330718011625
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.33548387096774196,
"acc_stderr": 0.02686020644472433,
"acc_norm": 0.33548387096774196,
"acc_norm_stderr": 0.02686020644472433
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6025641025641025,
"acc_stderr": 0.03205953453789293,
"acc_norm": 0.6025641025641025,
"acc_norm_stderr": 0.03205953453789293
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.33584905660377357,
"acc_stderr": 0.029067220146644826,
"acc_norm": 0.33584905660377357,
"acc_norm_stderr": 0.029067220146644826
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4727272727272727,
"acc_stderr": 0.04782001791380063,
"acc_norm": 0.4727272727272727,
"acc_norm_stderr": 0.04782001791380063
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.21481481481481482,
"acc_stderr": 0.025040443877000693,
"acc_norm": 0.21481481481481482,
"acc_norm_stderr": 0.025040443877000693
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.23841059602649006,
"acc_stderr": 0.03479185572599661,
"acc_norm": 0.23841059602649006,
"acc_norm_stderr": 0.03479185572599661
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.4577114427860697,
"acc_stderr": 0.03522865864099597,
"acc_norm": 0.4577114427860697,
"acc_norm_stderr": 0.03522865864099597
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.32947976878612717,
"acc_stderr": 0.03583901754736412,
"acc_norm": 0.32947976878612717,
"acc_norm_stderr": 0.03583901754736412
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2566137566137566,
"acc_stderr": 0.022494510767503154,
"acc_norm": 0.2566137566137566,
"acc_norm_stderr": 0.022494510767503154
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3680555555555556,
"acc_stderr": 0.04032999053960717,
"acc_norm": 0.3680555555555556,
"acc_norm_stderr": 0.04032999053960717
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.22,
"acc_stderr": 0.041633319989322695,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322695
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956911
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.37283236994219654,
"acc_stderr": 0.026033890613576294,
"acc_norm": 0.37283236994219654,
"acc_norm_stderr": 0.026033890613576294
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.34355828220858897,
"acc_stderr": 0.03731133519673893,
"acc_norm": 0.34355828220858897,
"acc_norm_stderr": 0.03731133519673893
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.39814814814814814,
"acc_stderr": 0.02723741509459248,
"acc_norm": 0.39814814814814814,
"acc_norm_stderr": 0.02723741509459248
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.44559585492227977,
"acc_stderr": 0.035870149860756595,
"acc_norm": 0.44559585492227977,
"acc_norm_stderr": 0.035870149860756595
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.04185774424022056,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022056
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3761467889908257,
"acc_stderr": 0.020769231968205078,
"acc_norm": 0.3761467889908257,
"acc_norm_stderr": 0.020769231968205078
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235172,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235172
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3366013071895425,
"acc_stderr": 0.027057974624494382,
"acc_norm": 0.3366013071895425,
"acc_norm_stderr": 0.027057974624494382
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.628099173553719,
"acc_stderr": 0.044120158066245044,
"acc_norm": 0.628099173553719,
"acc_norm_stderr": 0.044120158066245044
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.34868421052631576,
"acc_stderr": 0.03878139888797611,
"acc_norm": 0.34868421052631576,
"acc_norm_stderr": 0.03878139888797611
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3562091503267974,
"acc_stderr": 0.019373332420724507,
"acc_norm": 0.3562091503267974,
"acc_norm_stderr": 0.019373332420724507
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3191489361702128,
"acc_stderr": 0.027807990141320186,
"acc_norm": 0.3191489361702128,
"acc_norm_stderr": 0.027807990141320186
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285713,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285713
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.1712962962962963,
"acc_stderr": 0.025695341643824705,
"acc_norm": 0.1712962962962963,
"acc_norm_stderr": 0.025695341643824705
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2770949720670391,
"acc_stderr": 0.014968772435812145,
"acc_norm": 0.2770949720670391,
"acc_norm_stderr": 0.014968772435812145
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.2536764705882353,
"acc_stderr": 0.02643132987078954,
"acc_norm": 0.2536764705882353,
"acc_norm_stderr": 0.02643132987078954
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2612244897959184,
"acc_stderr": 0.028123429335142797,
"acc_norm": 0.2612244897959184,
"acc_norm_stderr": 0.028123429335142797
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.540084388185654,
"acc_stderr": 0.03244246810187913,
"acc_norm": 0.540084388185654,
"acc_norm_stderr": 0.03244246810187913
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.32529335071707954,
"acc_stderr": 0.011965311536571528,
"acc_norm": 0.32529335071707954,
"acc_norm_stderr": 0.011965311536571528
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.46568627450980393,
"acc_stderr": 0.03501038327635897,
"acc_norm": 0.46568627450980393,
"acc_norm_stderr": 0.03501038327635897
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.46060606060606063,
"acc_stderr": 0.03892207016552013,
"acc_norm": 0.46060606060606063,
"acc_norm_stderr": 0.03892207016552013
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.23255813953488372,
"mc1_stderr": 0.014789157531080538,
"mc2": 0.3996297603129026,
"mc2_stderr": 0.014698539951630042
},
"harness|ko_commongen_v2|2": {
"acc": 0.3474178403755869,
"acc_stderr": 0.016322206819108943,
"acc_norm": 0.4518779342723005,
"acc_norm_stderr": 0.017060212258103228
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v3",
"model_sha": "f2f7f1ba950c94dab2f672259dcf420fe22f31ca",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}