results/Edentns/DataVortexS-10.7B-dpo-v1.12/result_2024-02-02 00:04:10.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.5042662116040956,
"acc_stderr": 0.014610858923956959,
"acc_norm": 0.5443686006825939,
"acc_norm_stderr": 0.01455374993930687
},
"harness|ko_hellaswag|10": {
"acc": 0.4971121290579566,
"acc_stderr": 0.004989698183207843,
"acc_norm": 0.6720772754431388,
"acc_norm_stderr": 0.00468497069690295
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6257309941520468,
"acc_stderr": 0.03711601185389482,
"acc_norm": 0.6257309941520468,
"acc_norm_stderr": 0.03711601185389482
},
"harness|ko_mmlu_management|5": {
"acc": 0.6796116504854369,
"acc_stderr": 0.04620284082280042,
"acc_norm": 0.6796116504854369,
"acc_norm_stderr": 0.04620284082280042
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6641123882503193,
"acc_stderr": 0.016889407235171683,
"acc_norm": 0.6641123882503193,
"acc_norm_stderr": 0.016889407235171683
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4148148148148148,
"acc_stderr": 0.04256193767901407,
"acc_norm": 0.4148148148148148,
"acc_norm_stderr": 0.04256193767901407
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4595744680851064,
"acc_stderr": 0.03257901482099836,
"acc_norm": 0.4595744680851064,
"acc_norm_stderr": 0.03257901482099836
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4939759036144578,
"acc_stderr": 0.03892212195333045,
"acc_norm": 0.4939759036144578,
"acc_norm_stderr": 0.03892212195333045
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5852090032154341,
"acc_stderr": 0.02798268045975956,
"acc_norm": 0.5852090032154341,
"acc_norm_stderr": 0.02798268045975956
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5964125560538116,
"acc_stderr": 0.032928028193303135,
"acc_norm": 0.5964125560538116,
"acc_norm_stderr": 0.032928028193303135
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5954198473282443,
"acc_stderr": 0.043046937953806645,
"acc_norm": 0.5954198473282443,
"acc_norm_stderr": 0.043046937953806645
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.45,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.45,
"acc_norm_stderr": 0.04999999999999999
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7121212121212122,
"acc_stderr": 0.03225883512300992,
"acc_norm": 0.7121212121212122,
"acc_norm_stderr": 0.03225883512300992
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.41379310344827586,
"acc_stderr": 0.04104269211806232,
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.04104269211806232
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.047551296160629475,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.047551296160629475
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6302521008403361,
"acc_stderr": 0.03135709599613591,
"acc_norm": 0.6302521008403361,
"acc_norm_stderr": 0.03135709599613591
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5820512820512821,
"acc_stderr": 0.025007329882461207,
"acc_norm": 0.5820512820512821,
"acc_norm_stderr": 0.025007329882461207
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.04557239513497751,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.04557239513497751
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.03481904844438803,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.03481904844438803
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.567741935483871,
"acc_stderr": 0.0281817397200194,
"acc_norm": 0.567741935483871,
"acc_norm_stderr": 0.0281817397200194
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7863247863247863,
"acc_stderr": 0.026853450377009144,
"acc_norm": 0.7863247863247863,
"acc_norm_stderr": 0.026853450377009144
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5584905660377358,
"acc_stderr": 0.030561590426731837,
"acc_norm": 0.5584905660377358,
"acc_norm_stderr": 0.030561590426731837
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5818181818181818,
"acc_stderr": 0.04724577405731573,
"acc_norm": 0.5818181818181818,
"acc_norm_stderr": 0.04724577405731573
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.35555555555555557,
"acc_stderr": 0.02918571494985741,
"acc_norm": 0.35555555555555557,
"acc_norm_stderr": 0.02918571494985741
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.038796870240733264,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.038796870240733264
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6716417910447762,
"acc_stderr": 0.033206858897443244,
"acc_norm": 0.6716417910447762,
"acc_norm_stderr": 0.033206858897443244
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4277456647398844,
"acc_stderr": 0.03772446857518026,
"acc_norm": 0.4277456647398844,
"acc_norm_stderr": 0.03772446857518026
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.47619047619047616,
"acc_stderr": 0.02572209706438851,
"acc_norm": 0.47619047619047616,
"acc_norm_stderr": 0.02572209706438851
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.04155319955593146,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04155319955593146
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542126,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542126
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5606936416184971,
"acc_stderr": 0.026720034380514998,
"acc_norm": 0.5606936416184971,
"acc_norm_stderr": 0.026720034380514998
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.48466257668711654,
"acc_stderr": 0.03926522378708843,
"acc_norm": 0.48466257668711654,
"acc_norm_stderr": 0.03926522378708843
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.6234567901234568,
"acc_stderr": 0.026959344518747784,
"acc_norm": 0.6234567901234568,
"acc_norm_stderr": 0.026959344518747784
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6994818652849741,
"acc_stderr": 0.03308818594415749,
"acc_norm": 0.6994818652849741,
"acc_norm_stderr": 0.03308818594415749
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.43859649122807015,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.43859649122807015,
"acc_norm_stderr": 0.04668000738510455
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6880733944954128,
"acc_stderr": 0.019862967976707245,
"acc_norm": 0.6880733944954128,
"acc_norm_stderr": 0.019862967976707245
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.48412698412698413,
"acc_stderr": 0.04469881854072606,
"acc_norm": 0.48412698412698413,
"acc_norm_stderr": 0.04469881854072606
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5882352941176471,
"acc_stderr": 0.028180596328259287,
"acc_norm": 0.5882352941176471,
"acc_norm_stderr": 0.028180596328259287
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.58,
"acc_norm_stderr": 0.04960449637488583
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.040655781409087044,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.040655781409087044
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.625,
"acc_stderr": 0.039397364351956274,
"acc_norm": 0.625,
"acc_norm_stderr": 0.039397364351956274
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.5032679738562091,
"acc_stderr": 0.020227402794434864,
"acc_norm": 0.5032679738562091,
"acc_norm_stderr": 0.020227402794434864
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.41134751773049644,
"acc_stderr": 0.029354911159940975,
"acc_norm": 0.41134751773049644,
"acc_norm_stderr": 0.029354911159940975
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.4107142857142857,
"acc_stderr": 0.046695106638751926,
"acc_norm": 0.4107142857142857,
"acc_norm_stderr": 0.046695106638751926
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5,
"acc_stderr": 0.034099716973523674,
"acc_norm": 0.5,
"acc_norm_stderr": 0.034099716973523674
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.32513966480446926,
"acc_stderr": 0.015666542785053566,
"acc_norm": 0.32513966480446926,
"acc_norm_stderr": 0.015666542785053566
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.71,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.71,
"acc_norm_stderr": 0.04560480215720684
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4742647058823529,
"acc_stderr": 0.03033257809455504,
"acc_norm": 0.4742647058823529,
"acc_norm_stderr": 0.03033257809455504
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5387755102040817,
"acc_stderr": 0.031912820526692774,
"acc_norm": 0.5387755102040817,
"acc_norm_stderr": 0.031912820526692774
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.8059071729957806,
"acc_stderr": 0.02574490253229094,
"acc_norm": 0.8059071729957806,
"acc_norm_stderr": 0.02574490253229094
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.4028683181225554,
"acc_stderr": 0.012526955577118007,
"acc_norm": 0.4028683181225554,
"acc_norm_stderr": 0.012526955577118007
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6862745098039216,
"acc_stderr": 0.03256685484460387,
"acc_norm": 0.6862745098039216,
"acc_norm_stderr": 0.03256685484460387
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.7090909090909091,
"acc_stderr": 0.03546563019624336,
"acc_norm": 0.7090909090909091,
"acc_norm_stderr": 0.03546563019624336
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.4394124847001224,
"mc1_stderr": 0.01737452048251371,
"mc2": 0.6187514885795848,
"mc2_stderr": 0.01610052121364447
},
"harness|ko_commongen_v2|2": {
"acc": 0.4982290436835891,
"acc_stderr": 0.017190246276231863,
"acc_norm": 0.5041322314049587,
"acc_norm_stderr": 0.017189767032130817
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Edentns/DataVortexS-10.7B-dpo-v1.12",
"model_sha": "31e67196838657abed64641859c0a95db65bba95",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}