{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3822525597269625,
"acc_stderr": 0.014200454049979279,
"acc_norm": 0.44112627986348124,
"acc_norm_stderr": 0.014509747749064664
},
"harness|ko_hellaswag|10": {
"acc": 0.41943835889265085,
"acc_stderr": 0.004924586362301652,
"acc_norm": 0.5719976100378411,
"acc_norm_stderr": 0.004937779821908573
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4678362573099415,
"acc_stderr": 0.038268824176603704,
"acc_norm": 0.4678362573099415,
"acc_norm_stderr": 0.038268824176603704
},
"harness|ko_mmlu_management|5": {
"acc": 0.5145631067961165,
"acc_stderr": 0.04948637324026637,
"acc_norm": 0.5145631067961165,
"acc_norm_stderr": 0.04948637324026637
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.01776925058353325,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.01776925058353325
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.043163785995113245,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.043163785995113245
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.425531914893617,
"acc_stderr": 0.03232146916224468,
"acc_norm": 0.425531914893617,
"acc_norm_stderr": 0.03232146916224468
},
"harness|ko_mmlu_virology|5": {
"acc": 0.40963855421686746,
"acc_stderr": 0.038284011150790206,
"acc_norm": 0.40963855421686746,
"acc_norm_stderr": 0.038284011150790206
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5176848874598071,
"acc_stderr": 0.028380322849077138,
"acc_norm": 0.5176848874598071,
"acc_norm_stderr": 0.028380322849077138
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.515695067264574,
"acc_stderr": 0.0335412657542081,
"acc_norm": 0.515695067264574,
"acc_norm_stderr": 0.0335412657542081
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5190839694656488,
"acc_stderr": 0.043820947055509867,
"acc_norm": 0.5190839694656488,
"acc_norm_stderr": 0.043820947055509867
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5707070707070707,
"acc_stderr": 0.03526552724601198,
"acc_norm": 0.5707070707070707,
"acc_norm_stderr": 0.03526552724601198
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.43448275862068964,
"acc_stderr": 0.041307408795554966,
"acc_norm": 0.43448275862068964,
"acc_norm_stderr": 0.041307408795554966
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.19607843137254902,
"acc_stderr": 0.039505818611799616,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.039505818611799616
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.47058823529411764,
"acc_stderr": 0.032422250271150074,
"acc_norm": 0.47058823529411764,
"acc_norm_stderr": 0.032422250271150074
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.40512820512820513,
"acc_stderr": 0.02489047176993815,
"acc_norm": 0.40512820512820513,
"acc_norm_stderr": 0.02489047176993815
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.04820403072760627,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.04820403072760627
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3694581280788177,
"acc_stderr": 0.03395970381998574,
"acc_norm": 0.3694581280788177,
"acc_norm_stderr": 0.03395970381998574
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5129032258064516,
"acc_stderr": 0.02843453315268187,
"acc_norm": 0.5129032258064516,
"acc_norm_stderr": 0.02843453315268187
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6752136752136753,
"acc_stderr": 0.03067902276549883,
"acc_norm": 0.6752136752136753,
"acc_norm_stderr": 0.03067902276549883
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.44150943396226416,
"acc_stderr": 0.030561590426731833,
"acc_norm": 0.44150943396226416,
"acc_norm_stderr": 0.030561590426731833
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5363636363636364,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.5363636363636364,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.23703703703703705,
"acc_stderr": 0.025928876132766107,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.025928876132766107
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.03802039760107903,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.03802039760107903
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5124378109452736,
"acc_stderr": 0.0353443984853958,
"acc_norm": 0.5124378109452736,
"acc_norm_stderr": 0.0353443984853958
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3815028901734104,
"acc_stderr": 0.03703851193099521,
"acc_norm": 0.3815028901734104,
"acc_norm_stderr": 0.03703851193099521
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.28835978835978837,
"acc_stderr": 0.0233306540545359,
"acc_norm": 0.28835978835978837,
"acc_norm_stderr": 0.0233306540545359
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4166666666666667,
"acc_stderr": 0.041227287076512825,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.041227287076512825
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4884393063583815,
"acc_stderr": 0.026911898686377927,
"acc_norm": 0.4884393063583815,
"acc_norm_stderr": 0.026911898686377927
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4539877300613497,
"acc_stderr": 0.0391170190467718,
"acc_norm": 0.4539877300613497,
"acc_norm_stderr": 0.0391170190467718
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.49382716049382713,
"acc_stderr": 0.027818623962583295,
"acc_norm": 0.49382716049382713,
"acc_norm_stderr": 0.027818623962583295
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5233160621761658,
"acc_stderr": 0.036045136724422014,
"acc_norm": 0.5233160621761658,
"acc_norm_stderr": 0.036045136724422014
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.04096985139843671,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.04096985139843671
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5651376146788991,
"acc_stderr": 0.021254631465609273,
"acc_norm": 0.5651376146788991,
"acc_norm_stderr": 0.021254631465609273
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2619047619047619,
"acc_stderr": 0.03932537680392869,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.03932537680392869
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.42810457516339867,
"acc_stderr": 0.028332397483664274,
"acc_norm": 0.42810457516339867,
"acc_norm_stderr": 0.028332397483664274
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.0436923632657398,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.0436923632657398
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.40131578947368424,
"acc_stderr": 0.03988903703336284,
"acc_norm": 0.40131578947368424,
"acc_norm_stderr": 0.03988903703336284
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3954248366013072,
"acc_stderr": 0.019780465954777515,
"acc_norm": 0.3954248366013072,
"acc_norm_stderr": 0.019780465954777515
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.30851063829787234,
"acc_stderr": 0.027553366165101376,
"acc_norm": 0.30851063829787234,
"acc_norm_stderr": 0.027553366165101376
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25892857142857145,
"acc_stderr": 0.04157751539865629,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.04157751539865629
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.33796296296296297,
"acc_stderr": 0.03225941352631295,
"acc_norm": 0.33796296296296297,
"acc_norm_stderr": 0.03225941352631295
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2558659217877095,
"acc_stderr": 0.01459362092321074,
"acc_norm": 0.2558659217877095,
"acc_norm_stderr": 0.01459362092321074
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.34191176470588236,
"acc_stderr": 0.028814722422254167,
"acc_norm": 0.34191176470588236,
"acc_norm_stderr": 0.028814722422254167
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.43673469387755104,
"acc_stderr": 0.03175195237583322,
"acc_norm": 0.43673469387755104,
"acc_norm_stderr": 0.03175195237583322
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6286919831223629,
"acc_stderr": 0.03145068600744858,
"acc_norm": 0.6286919831223629,
"acc_norm_stderr": 0.03145068600744858
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.34615384615384615,
"acc_stderr": 0.012150699768228563,
"acc_norm": 0.34615384615384615,
"acc_norm_stderr": 0.012150699768228563
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.03503235296367994,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.03503235296367994
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5696969696969697,
"acc_stderr": 0.03866225962879077,
"acc_norm": 0.5696969696969697,
"acc_norm_stderr": 0.03866225962879077
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2460220318237454,
"mc1_stderr": 0.015077219200662574,
"mc2": 0.40225459810500935,
"mc2_stderr": 0.01478258523910622
},
"harness|ko_commongen_v2|2": {
"acc": 0.38488783943329397,
"acc_stderr": 0.016728579701498665,
"acc_norm": 0.4427390791027155,
"acc_norm_stderr": 0.017077254131556217
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1",
"model_sha": "a416328b862669edfe25be6c305bc9f5ccc4d727",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}