results/Edentns/DataVortexS-10.7B-dpo-v0.1/result_2024-01-10 04:37:18.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.36006825938566556,
"acc_stderr": 0.01402751681458519,
"acc_norm": 0.4786689419795222,
"acc_norm_stderr": 0.014598087973127108
},
"harness|ko_hellaswag|10": {
"acc": 0.3886675960963951,
"acc_stderr": 0.004864513262194307,
"acc_norm": 0.5717984465245967,
"acc_norm_stderr": 0.004938068627349492
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6491228070175439,
"acc_stderr": 0.03660298834049163,
"acc_norm": 0.6491228070175439,
"acc_norm_stderr": 0.03660298834049163
},
"harness|ko_mmlu_management|5": {
"acc": 0.6116504854368932,
"acc_stderr": 0.04825729337356389,
"acc_norm": 0.6116504854368932,
"acc_norm_stderr": 0.04825729337356389
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6704980842911877,
"acc_stderr": 0.016808322261740442,
"acc_norm": 0.6704980842911877,
"acc_norm_stderr": 0.016808322261740442
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4,
"acc_stderr": 0.04232073695151589,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04232073695151589
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.15,
"acc_stderr": 0.03588702812826372,
"acc_norm": 0.15,
"acc_norm_stderr": 0.03588702812826372
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.5361702127659574,
"acc_stderr": 0.03260038511835771,
"acc_norm": 0.5361702127659574,
"acc_norm_stderr": 0.03260038511835771
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4939759036144578,
"acc_stderr": 0.03892212195333047,
"acc_norm": 0.4939759036144578,
"acc_norm_stderr": 0.03892212195333047
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6430868167202572,
"acc_stderr": 0.02721042037593402,
"acc_norm": 0.6430868167202572,
"acc_norm_stderr": 0.02721042037593402
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5739910313901345,
"acc_stderr": 0.03318833286217281,
"acc_norm": 0.5739910313901345,
"acc_norm_stderr": 0.03318833286217281
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6641221374045801,
"acc_stderr": 0.04142313771996664,
"acc_norm": 0.6641221374045801,
"acc_norm_stderr": 0.04142313771996664
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7070707070707071,
"acc_stderr": 0.032424979581788166,
"acc_norm": 0.7070707070707071,
"acc_norm_stderr": 0.032424979581788166
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192118,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192118
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3431372549019608,
"acc_stderr": 0.04724007352383888,
"acc_norm": 0.3431372549019608,
"acc_norm_stderr": 0.04724007352383888
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6302521008403361,
"acc_stderr": 0.03135709599613591,
"acc_norm": 0.6302521008403361,
"acc_norm_stderr": 0.03135709599613591
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5666666666666667,
"acc_stderr": 0.0251246535258851,
"acc_norm": 0.5666666666666667,
"acc_norm_stderr": 0.0251246535258851
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.58,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.58,
"acc_norm_stderr": 0.04960449637488583
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6851851851851852,
"acc_stderr": 0.04489931073591312,
"acc_norm": 0.6851851851851852,
"acc_norm_stderr": 0.04489931073591312
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3793103448275862,
"acc_stderr": 0.034139638059062345,
"acc_norm": 0.3793103448275862,
"acc_norm_stderr": 0.034139638059062345
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.6064516129032258,
"acc_stderr": 0.02779187875313226,
"acc_norm": 0.6064516129032258,
"acc_norm_stderr": 0.02779187875313226
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.8162393162393162,
"acc_stderr": 0.025372139671722933,
"acc_norm": 0.8162393162393162,
"acc_norm_stderr": 0.025372139671722933
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5660377358490566,
"acc_stderr": 0.03050329201334259,
"acc_norm": 0.5660377358490566,
"acc_norm_stderr": 0.03050329201334259
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.6272727272727273,
"acc_stderr": 0.04631381319425465,
"acc_norm": 0.6272727272727273,
"acc_norm_stderr": 0.04631381319425465
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3074074074074074,
"acc_stderr": 0.028133252578815635,
"acc_norm": 0.3074074074074074,
"acc_norm_stderr": 0.028133252578815635
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.37748344370860926,
"acc_stderr": 0.039580272311215706,
"acc_norm": 0.37748344370860926,
"acc_norm_stderr": 0.039580272311215706
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.7014925373134329,
"acc_stderr": 0.032357437893550445,
"acc_norm": 0.7014925373134329,
"acc_norm_stderr": 0.032357437893550445
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5375722543352601,
"acc_stderr": 0.0380168510452446,
"acc_norm": 0.5375722543352601,
"acc_norm_stderr": 0.0380168510452446
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.4312169312169312,
"acc_stderr": 0.025506481698138215,
"acc_norm": 0.4312169312169312,
"acc_norm_stderr": 0.025506481698138215
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5138888888888888,
"acc_stderr": 0.041795966175810016,
"acc_norm": 0.5138888888888888,
"acc_norm_stderr": 0.041795966175810016
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.6242774566473989,
"acc_stderr": 0.02607431485165708,
"acc_norm": 0.6242774566473989,
"acc_norm_stderr": 0.02607431485165708
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5276073619631901,
"acc_stderr": 0.0392237829061099,
"acc_norm": 0.5276073619631901,
"acc_norm_stderr": 0.0392237829061099
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5895061728395061,
"acc_stderr": 0.027371350925124764,
"acc_norm": 0.5895061728395061,
"acc_norm_stderr": 0.027371350925124764
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.7046632124352331,
"acc_stderr": 0.032922966391551414,
"acc_norm": 0.7046632124352331,
"acc_norm_stderr": 0.032922966391551414
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.47368421052631576,
"acc_stderr": 0.046970851366478626,
"acc_norm": 0.47368421052631576,
"acc_norm_stderr": 0.046970851366478626
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6880733944954128,
"acc_stderr": 0.019862967976707245,
"acc_norm": 0.6880733944954128,
"acc_norm_stderr": 0.019862967976707245
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.044444444444444495,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.044444444444444495
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5980392156862745,
"acc_stderr": 0.02807415894760066,
"acc_norm": 0.5980392156862745,
"acc_norm_stderr": 0.02807415894760066
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.57,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.57,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7603305785123967,
"acc_stderr": 0.03896878985070417,
"acc_norm": 0.7603305785123967,
"acc_norm_stderr": 0.03896878985070417
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5723684210526315,
"acc_stderr": 0.04026097083296563,
"acc_norm": 0.5723684210526315,
"acc_norm_stderr": 0.04026097083296563
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.5098039215686274,
"acc_stderr": 0.02022394600507432,
"acc_norm": 0.5098039215686274,
"acc_norm_stderr": 0.02022394600507432
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.4148936170212766,
"acc_stderr": 0.029392236584612493,
"acc_norm": 0.4148936170212766,
"acc_norm_stderr": 0.029392236584612493
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.41964285714285715,
"acc_stderr": 0.04684099321077106,
"acc_norm": 0.41964285714285715,
"acc_norm_stderr": 0.04684099321077106
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5694444444444444,
"acc_stderr": 0.03376922151252336,
"acc_norm": 0.5694444444444444,
"acc_norm_stderr": 0.03376922151252336
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2871508379888268,
"acc_stderr": 0.01513160884996376,
"acc_norm": 0.2871508379888268,
"acc_norm_stderr": 0.01513160884996376
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.5036764705882353,
"acc_stderr": 0.0303720158854282,
"acc_norm": 0.5036764705882353,
"acc_norm_stderr": 0.0303720158854282
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.6040816326530613,
"acc_stderr": 0.03130802899065686,
"acc_norm": 0.6040816326530613,
"acc_norm_stderr": 0.03130802899065686
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7510548523206751,
"acc_stderr": 0.028146970599422644,
"acc_norm": 0.7510548523206751,
"acc_norm_stderr": 0.028146970599422644
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.42046936114732725,
"acc_stderr": 0.012607654553832705,
"acc_norm": 0.42046936114732725,
"acc_norm_stderr": 0.012607654553832705
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.696078431372549,
"acc_stderr": 0.03228210387037891,
"acc_norm": 0.696078431372549,
"acc_norm_stderr": 0.03228210387037891
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.703030303030303,
"acc_stderr": 0.035679697722680474,
"acc_norm": 0.703030303030303,
"acc_norm_stderr": 0.035679697722680474
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3488372093023256,
"mc1_stderr": 0.01668441985998688,
"mc2": 0.5363520506790478,
"mc2_stderr": 0.01620621230829838
},
"harness|ko_commongen_v2|2": {
"acc": 0.48642266824085006,
"acc_stderr": 0.01718401506040145,
"acc_norm": 0.525383707201889,
"acc_norm_stderr": 0.017168187201429246
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Edentns/DataVortexS-10.7B-dpo-v0.1",
"model_sha": "76fafe2757d64b083d4f20b46798d9592827aacf",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}