results/GAI-LLM/KoSOLAR-10.7B-dpo-v1/result_2024-01-09 05:52:01.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.34044368600682595,
"acc_stderr": 0.01384746051889298,
"acc_norm": 0.36945392491467577,
"acc_norm_stderr": 0.0141045783664919
},
"harness|ko_hellaswag|10": {
"acc": 0.3231428002389962,
"acc_stderr": 0.004667209383690232,
"acc_norm": 0.3798048197570205,
"acc_norm_stderr": 0.004843462545943493
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5321637426900585,
"acc_stderr": 0.038268824176603704,
"acc_norm": 0.5321637426900585,
"acc_norm_stderr": 0.038268824176603704
},
"harness|ko_mmlu_management|5": {
"acc": 0.6116504854368932,
"acc_stderr": 0.04825729337356389,
"acc_norm": 0.6116504854368932,
"acc_norm_stderr": 0.04825729337356389
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5938697318007663,
"acc_stderr": 0.017562037406478923,
"acc_norm": 0.5938697318007663,
"acc_norm_stderr": 0.017562037406478923
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4222222222222222,
"acc_stderr": 0.04266763404099582,
"acc_norm": 0.4222222222222222,
"acc_norm_stderr": 0.04266763404099582
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816508,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816508
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4340425531914894,
"acc_stderr": 0.032400380867927465,
"acc_norm": 0.4340425531914894,
"acc_norm_stderr": 0.032400380867927465
},
"harness|ko_mmlu_virology|5": {
"acc": 0.40963855421686746,
"acc_stderr": 0.03828401115079022,
"acc_norm": 0.40963855421686746,
"acc_norm_stderr": 0.03828401115079022
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5337620578778135,
"acc_stderr": 0.02833327710956279,
"acc_norm": 0.5337620578778135,
"acc_norm_stderr": 0.02833327710956279
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.47085201793721976,
"acc_stderr": 0.03350073248773404,
"acc_norm": 0.47085201793721976,
"acc_norm_stderr": 0.03350073248773404
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.549618320610687,
"acc_stderr": 0.04363643698524779,
"acc_norm": 0.549618320610687,
"acc_norm_stderr": 0.04363643698524779
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5707070707070707,
"acc_stderr": 0.03526552724601199,
"acc_norm": 0.5707070707070707,
"acc_norm_stderr": 0.03526552724601199
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4482758620689655,
"acc_stderr": 0.04144311810878151,
"acc_norm": 0.4482758620689655,
"acc_norm_stderr": 0.04144311810878151
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.042801058373643966,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.042801058373643966
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5504201680672269,
"acc_stderr": 0.03231293497137707,
"acc_norm": 0.5504201680672269,
"acc_norm_stderr": 0.03231293497137707
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.48205128205128206,
"acc_stderr": 0.025334667080954963,
"acc_norm": 0.48205128205128206,
"acc_norm_stderr": 0.025334667080954963
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.57,
"acc_stderr": 0.0497569851956243,
"acc_norm": 0.57,
"acc_norm_stderr": 0.0497569851956243
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5833333333333334,
"acc_stderr": 0.04766075165356461,
"acc_norm": 0.5833333333333334,
"acc_norm_stderr": 0.04766075165356461
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4187192118226601,
"acc_stderr": 0.03471192860518468,
"acc_norm": 0.4187192118226601,
"acc_norm_stderr": 0.03471192860518468
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5451612903225806,
"acc_stderr": 0.028327743091561067,
"acc_norm": 0.5451612903225806,
"acc_norm_stderr": 0.028327743091561067
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6923076923076923,
"acc_stderr": 0.030236389942173092,
"acc_norm": 0.6923076923076923,
"acc_norm_stderr": 0.030236389942173092
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4981132075471698,
"acc_stderr": 0.03077265364207565,
"acc_norm": 0.4981132075471698,
"acc_norm_stderr": 0.03077265364207565
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5181818181818182,
"acc_stderr": 0.04785964010794915,
"acc_norm": 0.5181818181818182,
"acc_norm_stderr": 0.04785964010794915
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2851851851851852,
"acc_stderr": 0.027528599210340496,
"acc_norm": 0.2851851851851852,
"acc_norm_stderr": 0.027528599210340496
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3509933774834437,
"acc_stderr": 0.03896981964257374,
"acc_norm": 0.3509933774834437,
"acc_norm_stderr": 0.03896981964257374
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6119402985074627,
"acc_stderr": 0.03445789964362749,
"acc_norm": 0.6119402985074627,
"acc_norm_stderr": 0.03445789964362749
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4046242774566474,
"acc_stderr": 0.03742461193887248,
"acc_norm": 0.4046242774566474,
"acc_norm_stderr": 0.03742461193887248
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.38095238095238093,
"acc_stderr": 0.025010749116137602,
"acc_norm": 0.38095238095238093,
"acc_norm_stderr": 0.025010749116137602
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4513888888888889,
"acc_stderr": 0.04161402398403279,
"acc_norm": 0.4513888888888889,
"acc_norm_stderr": 0.04161402398403279
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145633
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.47398843930635837,
"acc_stderr": 0.02688264343402289,
"acc_norm": 0.47398843930635837,
"acc_norm_stderr": 0.02688264343402289
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4785276073619632,
"acc_stderr": 0.03924746876751129,
"acc_norm": 0.4785276073619632,
"acc_norm_stderr": 0.03924746876751129
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5709876543209876,
"acc_stderr": 0.027538925613470863,
"acc_norm": 0.5709876543209876,
"acc_norm_stderr": 0.027538925613470863
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6476683937823834,
"acc_stderr": 0.034474782864143565,
"acc_norm": 0.6476683937823834,
"acc_norm_stderr": 0.034474782864143565
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.34210526315789475,
"acc_stderr": 0.04462917535336938,
"acc_norm": 0.34210526315789475,
"acc_norm_stderr": 0.04462917535336938
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5834862385321101,
"acc_stderr": 0.02113637650403087,
"acc_norm": 0.5834862385321101,
"acc_norm_stderr": 0.02113637650403087
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3412698412698413,
"acc_stderr": 0.042407993275749255,
"acc_norm": 0.3412698412698413,
"acc_norm_stderr": 0.042407993275749255
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5196078431372549,
"acc_stderr": 0.028607893699576073,
"acc_norm": 0.5196078431372549,
"acc_norm_stderr": 0.028607893699576073
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6611570247933884,
"acc_stderr": 0.0432076780753667,
"acc_norm": 0.6611570247933884,
"acc_norm_stderr": 0.0432076780753667
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5131578947368421,
"acc_stderr": 0.04067533136309173,
"acc_norm": 0.5131578947368421,
"acc_norm_stderr": 0.04067533136309173
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.4035947712418301,
"acc_stderr": 0.019848280168401164,
"acc_norm": 0.4035947712418301,
"acc_norm_stderr": 0.019848280168401164
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.4078014184397163,
"acc_stderr": 0.02931601177634356,
"acc_norm": 0.4078014184397163,
"acc_norm_stderr": 0.02931601177634356
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.32142857142857145,
"acc_stderr": 0.04432804055291519,
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.04432804055291519
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4537037037037037,
"acc_stderr": 0.033953227263757976,
"acc_norm": 0.4537037037037037,
"acc_norm_stderr": 0.033953227263757976
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.34301675977653634,
"acc_stderr": 0.015876912673057738,
"acc_norm": 0.34301675977653634,
"acc_norm_stderr": 0.015876912673057738
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.48,
"acc_stderr": 0.05021167315686779,
"acc_norm": 0.48,
"acc_norm_stderr": 0.05021167315686779
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145633
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.47058823529411764,
"acc_stderr": 0.03032024326500413,
"acc_norm": 0.47058823529411764,
"acc_norm_stderr": 0.03032024326500413
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.47346938775510206,
"acc_stderr": 0.03196412734523272,
"acc_norm": 0.47346938775510206,
"acc_norm_stderr": 0.03196412734523272
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6751054852320675,
"acc_stderr": 0.030486039389105307,
"acc_norm": 0.6751054852320675,
"acc_norm_stderr": 0.030486039389105307
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3578878748370274,
"acc_stderr": 0.012243563850490325,
"acc_norm": 0.3578878748370274,
"acc_norm_stderr": 0.012243563850490325
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5833333333333334,
"acc_stderr": 0.034602283272391725,
"acc_norm": 0.5833333333333334,
"acc_norm_stderr": 0.034602283272391725
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5818181818181818,
"acc_stderr": 0.03851716319398395,
"acc_norm": 0.5818181818181818,
"acc_norm_stderr": 0.03851716319398395
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2729498164014688,
"mc1_stderr": 0.015594753632006516,
"mc2": 0.5157849052136222,
"mc2_stderr": 0.01681907310324583
},
"harness|ko_commongen_v2|2": {
"acc": 0.24793388429752067,
"acc_stderr": 0.014846044968252252,
"acc_norm": 0.2939787485242031,
"acc_norm_stderr": 0.015663242569091122
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "GAI-LLM/KoSOLAR-10.7B-dpo-v1",
"model_sha": "2e96d7a3669d63376c7a49d5793d69bdcce52f72",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}