results/F24/llama-2-koen-13b-slimOrca/result_2023-12-03 09:10:05.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.4061433447098976,
"acc_stderr": 0.014351656690097862,
"acc_norm": 0.46245733788395904,
"acc_norm_stderr": 0.014570144495075581
},
"harness|ko_hellaswag|10": {
"acc": 0.4186417048396734,
"acc_stderr": 0.0049232818418285165,
"acc_norm": 0.5636327424815774,
"acc_norm_stderr": 0.004949207947265917
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5730994152046783,
"acc_stderr": 0.03793620616529917,
"acc_norm": 0.5730994152046783,
"acc_norm_stderr": 0.03793620616529917
},
"harness|ko_mmlu_management|5": {
"acc": 0.6213592233009708,
"acc_stderr": 0.04802694698258974,
"acc_norm": 0.6213592233009708,
"acc_norm_stderr": 0.04802694698258974
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5466155810983397,
"acc_stderr": 0.01780208713585031,
"acc_norm": 0.5466155810983397,
"acc_norm_stderr": 0.01780208713585031
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4666666666666667,
"acc_stderr": 0.043097329010363554,
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.043097329010363554
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206824,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206824
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.44680851063829785,
"acc_stderr": 0.0325005368436584,
"acc_norm": 0.44680851063829785,
"acc_norm_stderr": 0.0325005368436584
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4457831325301205,
"acc_stderr": 0.038695433234721015,
"acc_norm": 0.4457831325301205,
"acc_norm_stderr": 0.038695433234721015
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5209003215434084,
"acc_stderr": 0.028373270961069414,
"acc_norm": 0.5209003215434084,
"acc_norm_stderr": 0.028373270961069414
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4798206278026906,
"acc_stderr": 0.033530461674123,
"acc_norm": 0.4798206278026906,
"acc_norm_stderr": 0.033530461674123
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5267175572519084,
"acc_stderr": 0.04379024936553893,
"acc_norm": 0.5267175572519084,
"acc_norm_stderr": 0.04379024936553893
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5858585858585859,
"acc_stderr": 0.03509438348879629,
"acc_norm": 0.5858585858585859,
"acc_norm_stderr": 0.03509438348879629
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.42758620689655175,
"acc_stderr": 0.04122737111370332,
"acc_norm": 0.42758620689655175,
"acc_norm_stderr": 0.04122737111370332
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171453,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171453
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4789915966386555,
"acc_stderr": 0.0324498084999003,
"acc_norm": 0.4789915966386555,
"acc_norm_stderr": 0.0324498084999003
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.47435897435897434,
"acc_stderr": 0.02531764972644865,
"acc_norm": 0.47435897435897434,
"acc_norm_stderr": 0.02531764972644865
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5648148148148148,
"acc_stderr": 0.04792898170907062,
"acc_norm": 0.5648148148148148,
"acc_norm_stderr": 0.04792898170907062
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.39408866995073893,
"acc_stderr": 0.03438157967036544,
"acc_norm": 0.39408866995073893,
"acc_norm_stderr": 0.03438157967036544
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.45806451612903226,
"acc_stderr": 0.028343787250540636,
"acc_norm": 0.45806451612903226,
"acc_norm_stderr": 0.028343787250540636
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7008547008547008,
"acc_stderr": 0.029996951858349476,
"acc_norm": 0.7008547008547008,
"acc_norm_stderr": 0.029996951858349476
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.45660377358490567,
"acc_stderr": 0.030656748696739435,
"acc_norm": 0.45660377358490567,
"acc_norm_stderr": 0.030656748696739435
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5636363636363636,
"acc_stderr": 0.04750185058907296,
"acc_norm": 0.5636363636363636,
"acc_norm_stderr": 0.04750185058907296
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.028037929969114982,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.028037929969114982
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526733,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526733
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6119402985074627,
"acc_stderr": 0.03445789964362749,
"acc_norm": 0.6119402985074627,
"acc_norm_stderr": 0.03445789964362749
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4393063583815029,
"acc_stderr": 0.037842719328874674,
"acc_norm": 0.4393063583815029,
"acc_norm_stderr": 0.037842719328874674
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.30158730158730157,
"acc_stderr": 0.023636975996101803,
"acc_norm": 0.30158730158730157,
"acc_norm_stderr": 0.023636975996101803
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4236111111111111,
"acc_stderr": 0.041321250197233685,
"acc_norm": 0.4236111111111111,
"acc_norm_stderr": 0.041321250197233685
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.64,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.64,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5,
"acc_stderr": 0.026919095102908273,
"acc_norm": 0.5,
"acc_norm_stderr": 0.026919095102908273
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5030674846625767,
"acc_stderr": 0.03928297078179663,
"acc_norm": 0.5030674846625767,
"acc_norm_stderr": 0.03928297078179663
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.49382716049382713,
"acc_stderr": 0.027818623962583302,
"acc_norm": 0.49382716049382713,
"acc_norm_stderr": 0.027818623962583302
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5647668393782384,
"acc_stderr": 0.03578038165008585,
"acc_norm": 0.5647668393782384,
"acc_norm_stderr": 0.03578038165008585
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.24561403508771928,
"acc_stderr": 0.04049339297748142,
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.04049339297748142
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6,
"acc_stderr": 0.021004201260420078,
"acc_norm": 0.6,
"acc_norm_stderr": 0.021004201260420078
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.36507936507936506,
"acc_stderr": 0.043062412591271526,
"acc_norm": 0.36507936507936506,
"acc_norm_stderr": 0.043062412591271526
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4477124183006536,
"acc_stderr": 0.028472938478033526,
"acc_norm": 0.4477124183006536,
"acc_norm_stderr": 0.028472938478033526
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.44,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.44,
"acc_norm_stderr": 0.049888765156985884
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5867768595041323,
"acc_stderr": 0.04495087843548408,
"acc_norm": 0.5867768595041323,
"acc_norm_stderr": 0.04495087843548408
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4407894736842105,
"acc_stderr": 0.040403110624904356,
"acc_norm": 0.4407894736842105,
"acc_norm_stderr": 0.040403110624904356
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.39215686274509803,
"acc_stderr": 0.019751726508762626,
"acc_norm": 0.39215686274509803,
"acc_norm_stderr": 0.019751726508762626
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3262411347517731,
"acc_stderr": 0.02796845304356317,
"acc_norm": 0.3262411347517731,
"acc_norm_stderr": 0.02796845304356317
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.26785714285714285,
"acc_stderr": 0.04203277291467762,
"acc_norm": 0.26785714285714285,
"acc_norm_stderr": 0.04203277291467762
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.03114144782353604,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.03114144782353604
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2670391061452514,
"acc_stderr": 0.014796502622562548,
"acc_norm": 0.2670391061452514,
"acc_norm_stderr": 0.014796502622562548
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.39705882352941174,
"acc_stderr": 0.029722152099280055,
"acc_norm": 0.39705882352941174,
"acc_norm_stderr": 0.029722152099280055
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5306122448979592,
"acc_stderr": 0.031949171367580624,
"acc_norm": 0.5306122448979592,
"acc_norm_stderr": 0.031949171367580624
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5780590717299579,
"acc_stderr": 0.032148146302403695,
"acc_norm": 0.5780590717299579,
"acc_norm_stderr": 0.032148146302403695
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.318122555410691,
"acc_stderr": 0.011895407281104097,
"acc_norm": 0.318122555410691,
"acc_norm_stderr": 0.011895407281104097
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5,
"acc_stderr": 0.03509312031717982,
"acc_norm": 0.5,
"acc_norm_stderr": 0.03509312031717982
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5878787878787879,
"acc_stderr": 0.03843566993588717,
"acc_norm": 0.5878787878787879,
"acc_norm_stderr": 0.03843566993588717
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2778457772337821,
"mc1_stderr": 0.015680929364024633,
"mc2": 0.44254172455320107,
"mc2_stderr": 0.015186819172805456
},
"harness|ko_commongen_v2|2": {
"acc": 0.44510035419126326,
"acc_stderr": 0.017086417431005474,
"acc_norm": 0.4805194805194805,
"acc_norm_stderr": 0.01717730199234255
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "F24/llama-2-koen-13b-slimOrca",
"model_sha": "74138e08e67f4d1b710286b70399e75a4c03a511",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}