{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.28498293515358364,
"acc_stderr": 0.013191348179838792,
"acc_norm": 0.34982935153583616,
"acc_norm_stderr": 0.013936809212158287
},
"harness|ko_hellaswag|10": {
"acc": 0.3681537542322247,
"acc_stderr": 0.004813177057496272,
"acc_norm": 0.4675363473411671,
"acc_norm_stderr": 0.004979252954977322
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.03615507630310935,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03615507630310935
},
"harness|ko_mmlu_management|5": {
"acc": 0.22330097087378642,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.22330097087378642,
"acc_norm_stderr": 0.04123553189891431
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.015302380123542087,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.015302380123542087
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2,
"acc_stderr": 0.03455473702325438,
"acc_norm": 0.2,
"acc_norm_stderr": 0.03455473702325438
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2553191489361702,
"acc_stderr": 0.02850485647051421,
"acc_norm": 0.2553191489361702,
"acc_norm_stderr": 0.02850485647051421
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3072289156626506,
"acc_stderr": 0.03591566797824665,
"acc_norm": 0.3072289156626506,
"acc_norm_stderr": 0.03591566797824665
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.22508038585209003,
"acc_stderr": 0.023720088516179027,
"acc_norm": 0.22508038585209003,
"acc_norm_stderr": 0.023720088516179027
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.2600896860986547,
"acc_stderr": 0.02944249558585747,
"acc_norm": 0.2600896860986547,
"acc_norm_stderr": 0.02944249558585747
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.24427480916030533,
"acc_stderr": 0.03768335959728745,
"acc_norm": 0.24427480916030533,
"acc_norm_stderr": 0.03768335959728745
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.1919191919191919,
"acc_stderr": 0.028057791672989017,
"acc_norm": 0.1919191919191919,
"acc_norm_stderr": 0.028057791672989017
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135302
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.04158307533083286,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.04158307533083286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.2184873949579832,
"acc_stderr": 0.02684151432295894,
"acc_norm": 0.2184873949579832,
"acc_norm_stderr": 0.02684151432295894
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2153846153846154,
"acc_stderr": 0.020843034557462874,
"acc_norm": 0.2153846153846154,
"acc_norm_stderr": 0.020843034557462874
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.17,
"acc_stderr": 0.03775251680686371,
"acc_norm": 0.17,
"acc_norm_stderr": 0.03775251680686371
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.04236511258094633,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.04236511258094633
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.17733990147783252,
"acc_stderr": 0.026874337276808342,
"acc_norm": 0.17733990147783252,
"acc_norm_stderr": 0.026874337276808342
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2129032258064516,
"acc_stderr": 0.023287665127268545,
"acc_norm": 0.2129032258064516,
"acc_norm_stderr": 0.023287665127268545
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.2863247863247863,
"acc_stderr": 0.02961432369045665,
"acc_norm": 0.2863247863247863,
"acc_norm_stderr": 0.02961432369045665
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.22264150943396227,
"acc_stderr": 0.02560423347089909,
"acc_norm": 0.22264150943396227,
"acc_norm_stderr": 0.02560423347089909
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.04069306319721375,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.04069306319721375
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.22962962962962963,
"acc_stderr": 0.025644108639267624,
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.025644108639267624
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2582781456953642,
"acc_stderr": 0.035737053147634576,
"acc_norm": 0.2582781456953642,
"acc_norm_stderr": 0.035737053147634576
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.2736318407960199,
"acc_stderr": 0.03152439186555404,
"acc_norm": 0.2736318407960199,
"acc_norm_stderr": 0.03152439186555404
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2138728323699422,
"acc_stderr": 0.031265112061730424,
"acc_norm": 0.2138728323699422,
"acc_norm_stderr": 0.031265112061730424
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.02141168439369418,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.02141168439369418
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2569444444444444,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.2569444444444444,
"acc_norm_stderr": 0.03653946969442099
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.23410404624277456,
"acc_stderr": 0.022797110278071134,
"acc_norm": 0.23410404624277456,
"acc_norm_stderr": 0.022797110278071134
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.19631901840490798,
"acc_stderr": 0.031207970394709225,
"acc_norm": 0.19631901840490798,
"acc_norm_stderr": 0.031207970394709225
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2623456790123457,
"acc_stderr": 0.02447722285613511,
"acc_norm": 0.2623456790123457,
"acc_norm_stderr": 0.02447722285613511
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421296,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421296
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.20725388601036268,
"acc_stderr": 0.029252823291803617,
"acc_norm": 0.20725388601036268,
"acc_norm_stderr": 0.029252823291803617
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.21929824561403508,
"acc_stderr": 0.03892431106518752,
"acc_norm": 0.21929824561403508,
"acc_norm_stderr": 0.03892431106518752
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.21834862385321102,
"acc_stderr": 0.01771260052872273,
"acc_norm": 0.21834862385321102,
"acc_norm_stderr": 0.01771260052872273
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3412698412698413,
"acc_stderr": 0.042407993275749255,
"acc_norm": 0.3412698412698413,
"acc_norm_stderr": 0.042407993275749255
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.22875816993464052,
"acc_stderr": 0.02405102973991226,
"acc_norm": 0.22875816993464052,
"acc_norm_stderr": 0.02405102973991226
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.24793388429752067,
"acc_stderr": 0.03941897526516304,
"acc_norm": 0.24793388429752067,
"acc_norm_stderr": 0.03941897526516304
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.21052631578947367,
"acc_stderr": 0.03317672787533157,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.03317672787533157
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.25163398692810457,
"acc_stderr": 0.01755581809132227,
"acc_norm": 0.25163398692810457,
"acc_norm_stderr": 0.01755581809132227
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.23404255319148937,
"acc_stderr": 0.02525786135943241,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.02525786135943241
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25892857142857145,
"acc_stderr": 0.04157751539865629,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.04157751539865629
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3472222222222222,
"acc_stderr": 0.032468872436376486,
"acc_norm": 0.3472222222222222,
"acc_norm_stderr": 0.032468872436376486
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2737430167597765,
"acc_stderr": 0.014912413096372432,
"acc_norm": 0.2737430167597765,
"acc_norm_stderr": 0.014912413096372432
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542126,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542126
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3382352941176471,
"acc_stderr": 0.02873932851398357,
"acc_norm": 0.3382352941176471,
"acc_norm_stderr": 0.02873932851398357
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2163265306122449,
"acc_stderr": 0.026358916334904007,
"acc_norm": 0.2163265306122449,
"acc_norm_stderr": 0.026358916334904007
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2869198312236287,
"acc_stderr": 0.029443773022594693,
"acc_norm": 0.2869198312236287,
"acc_norm_stderr": 0.029443773022594693
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2438070404172099,
"acc_stderr": 0.010966507972178479,
"acc_norm": 0.2438070404172099,
"acc_norm_stderr": 0.010966507972178479
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.24019607843137256,
"acc_stderr": 0.02998373305591361,
"acc_norm": 0.24019607843137256,
"acc_norm_stderr": 0.02998373305591361
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.21212121212121213,
"acc_stderr": 0.03192271569548299,
"acc_norm": 0.21212121212121213,
"acc_norm_stderr": 0.03192271569548299
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2484700122399021,
"mc1_stderr": 0.015127427096520691,
"mc2": 0.4168577725013622,
"mc2_stderr": 0.015076547753296358
},
"harness|ko_commongen_v2|2": {
"acc": 0.26092089728453366,
"acc_stderr": 0.015097836279964204,
"acc_norm": 0.31286894923258557,
"acc_norm_stderr": 0.015941010118302658
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "ITT-AF/ITT-42dot_LLM-SFT-1.3B-v2.0",
"model_sha": "5fecf0d23c52b71ac2d33525956308a73648c7ac",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}