{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.7056313993174061,
"acc_stderr": 0.013318528460539422,
"acc_norm": 0.7517064846416383,
"acc_norm_stderr": 0.012624912868089762
},
"harness|ko_hellaswag|10": {
"acc": 0.5329615614419438,
"acc_stderr": 0.00497892716479289,
"acc_norm": 0.6955785700059749,
"acc_norm_stderr": 0.004592215118295274
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.7309941520467836,
"acc_stderr": 0.03401052620104089,
"acc_norm": 0.7309941520467836,
"acc_norm_stderr": 0.03401052620104089
},
"harness|ko_mmlu_management|5": {
"acc": 0.7669902912621359,
"acc_stderr": 0.04185832598928315,
"acc_norm": 0.7669902912621359,
"acc_norm_stderr": 0.04185832598928315
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.7369093231162197,
"acc_stderr": 0.015745497169049046,
"acc_norm": 0.7369093231162197,
"acc_norm_stderr": 0.015745497169049046
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.45185185185185184,
"acc_stderr": 0.04299268905480863,
"acc_norm": 0.45185185185185184,
"acc_norm_stderr": 0.04299268905480863
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.5404255319148936,
"acc_stderr": 0.03257901482099834,
"acc_norm": 0.5404255319148936,
"acc_norm_stderr": 0.03257901482099834
},
"harness|ko_mmlu_virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835817,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835817
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6591639871382636,
"acc_stderr": 0.026920841260776155,
"acc_norm": 0.6591639871382636,
"acc_norm_stderr": 0.026920841260776155
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.672645739910314,
"acc_stderr": 0.03149384670994131,
"acc_norm": 0.672645739910314,
"acc_norm_stderr": 0.03149384670994131
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6106870229007634,
"acc_stderr": 0.042764865428145914,
"acc_norm": 0.6106870229007634,
"acc_norm_stderr": 0.042764865428145914
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.51,
"acc_stderr": 0.050241839379569095,
"acc_norm": 0.51,
"acc_norm_stderr": 0.050241839379569095
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7626262626262627,
"acc_stderr": 0.030313710538198906,
"acc_norm": 0.7626262626262627,
"acc_norm_stderr": 0.030313710538198906
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5655172413793104,
"acc_stderr": 0.04130740879555497,
"acc_norm": 0.5655172413793104,
"acc_norm_stderr": 0.04130740879555497
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.047551296160629475,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.047551296160629475
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6554621848739496,
"acc_stderr": 0.030868682604121622,
"acc_norm": 0.6554621848739496,
"acc_norm_stderr": 0.030868682604121622
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.6487179487179487,
"acc_stderr": 0.024203665177902806,
"acc_norm": 0.6487179487179487,
"acc_norm_stderr": 0.024203665177902806
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6851851851851852,
"acc_stderr": 0.04489931073591312,
"acc_norm": 0.6851851851851852,
"acc_norm_stderr": 0.04489931073591312
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.46798029556650245,
"acc_stderr": 0.03510766597959217,
"acc_norm": 0.46798029556650245,
"acc_norm_stderr": 0.03510766597959217
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.635483870967742,
"acc_stderr": 0.02737987122994324,
"acc_norm": 0.635483870967742,
"acc_norm_stderr": 0.02737987122994324
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.02280138253459753,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.02280138253459753
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.6075471698113207,
"acc_stderr": 0.03005258057955785,
"acc_norm": 0.6075471698113207,
"acc_norm_stderr": 0.03005258057955785
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.04607582090719976,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.04607582090719976
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3925925925925926,
"acc_stderr": 0.029773847012532967,
"acc_norm": 0.3925925925925926,
"acc_norm_stderr": 0.029773847012532967
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3708609271523179,
"acc_stderr": 0.03943966699183629,
"acc_norm": 0.3708609271523179,
"acc_norm_stderr": 0.03943966699183629
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.7562189054726368,
"acc_stderr": 0.03036049015401465,
"acc_norm": 0.7562189054726368,
"acc_norm_stderr": 0.03036049015401465
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5664739884393064,
"acc_stderr": 0.03778621079092055,
"acc_norm": 0.5664739884393064,
"acc_norm_stderr": 0.03778621079092055
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.4576719576719577,
"acc_stderr": 0.02565886886205832,
"acc_norm": 0.4576719576719577,
"acc_norm_stderr": 0.02565886886205832
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.6180555555555556,
"acc_stderr": 0.040629907841466674,
"acc_norm": 0.6180555555555556,
"acc_norm_stderr": 0.040629907841466674
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.78,
"acc_stderr": 0.04163331998932264,
"acc_norm": 0.78,
"acc_norm_stderr": 0.04163331998932264
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.025992472029306393,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.025992472029306393
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.656441717791411,
"acc_stderr": 0.03731133519673893,
"acc_norm": 0.656441717791411,
"acc_norm_stderr": 0.03731133519673893
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.6944444444444444,
"acc_stderr": 0.02563082497562135,
"acc_norm": 0.6944444444444444,
"acc_norm_stderr": 0.02563082497562135
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.7512953367875648,
"acc_stderr": 0.031195840877700304,
"acc_norm": 0.7512953367875648,
"acc_norm_stderr": 0.031195840877700304
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.5,
"acc_stderr": 0.047036043419179864,
"acc_norm": 0.5,
"acc_norm_stderr": 0.047036043419179864
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.7834862385321101,
"acc_stderr": 0.01765871059444314,
"acc_norm": 0.7834862385321101,
"acc_norm_stderr": 0.01765871059444314
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.4365079365079365,
"acc_stderr": 0.04435932892851466,
"acc_norm": 0.4365079365079365,
"acc_norm_stderr": 0.04435932892851466
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.6568627450980392,
"acc_stderr": 0.027184498909941616,
"acc_norm": 0.6568627450980392,
"acc_norm_stderr": 0.027184498909941616
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098824,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098824
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.6578947368421053,
"acc_stderr": 0.038607315993160904,
"acc_norm": 0.6578947368421053,
"acc_norm_stderr": 0.038607315993160904
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.6143790849673203,
"acc_stderr": 0.01969145905235404,
"acc_norm": 0.6143790849673203,
"acc_norm_stderr": 0.01969145905235404
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.450354609929078,
"acc_stderr": 0.02968010556502904,
"acc_norm": 0.450354609929078,
"acc_norm_stderr": 0.02968010556502904
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.45535714285714285,
"acc_stderr": 0.04726835553719099,
"acc_norm": 0.45535714285714285,
"acc_norm_stderr": 0.04726835553719099
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5231481481481481,
"acc_stderr": 0.034063153607115065,
"acc_norm": 0.5231481481481481,
"acc_norm_stderr": 0.034063153607115065
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.45027932960893857,
"acc_stderr": 0.016639615236845817,
"acc_norm": 0.45027932960893857,
"acc_norm_stderr": 0.016639615236845817
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.5735294117647058,
"acc_stderr": 0.030042615832714867,
"acc_norm": 0.5735294117647058,
"acc_norm_stderr": 0.030042615832714867
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.6857142857142857,
"acc_stderr": 0.029719329422417454,
"acc_norm": 0.6857142857142857,
"acc_norm_stderr": 0.029719329422417454
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.759493670886076,
"acc_stderr": 0.027820781981149678,
"acc_norm": 0.759493670886076,
"acc_norm_stderr": 0.027820781981149678
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.46284224250325945,
"acc_stderr": 0.01273492357953206,
"acc_norm": 0.46284224250325945,
"acc_norm_stderr": 0.01273492357953206
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.7107843137254902,
"acc_stderr": 0.031822318676475544,
"acc_norm": 0.7107843137254902,
"acc_norm_stderr": 0.031822318676475544
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.03663974994391242,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.03663974994391242
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.6829865361077111,
"mc1_stderr": 0.01628920337440338,
"mc2": 0.7885502219855742,
"mc2_stderr": 0.013364293397046381
},
"harness|ko_commongen_v2|2": {
"acc": 0.5525383707201889,
"acc_stderr": 0.01709519030150058,
"acc_norm": 0.5702479338842975,
"acc_norm_stderr": 0.01701984753597221
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "JY623/KoSOLRA-10.7B-merge-v2.3",
"model_sha": "7fca6e88fa262cf88251537d1deff1aab6b37a73",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}