results/Edentns/DataVortexS-10.7B-dpo-v1.11/result_2024-02-02 00:07:20.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.5,
"acc_stderr": 0.014611390804670088,
"acc_norm": 0.5597269624573379,
"acc_norm_stderr": 0.014506769524804251
},
"harness|ko_hellaswag|10": {
"acc": 0.4976100378410675,
"acc_stderr": 0.004989724408664502,
"acc_norm": 0.6868153754232225,
"acc_norm_stderr": 0.004628409084218777
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6023391812865497,
"acc_stderr": 0.0375363895576169,
"acc_norm": 0.6023391812865497,
"acc_norm_stderr": 0.0375363895576169
},
"harness|ko_mmlu_management|5": {
"acc": 0.6504854368932039,
"acc_stderr": 0.047211885060971716,
"acc_norm": 0.6504854368932039,
"acc_norm_stderr": 0.047211885060971716
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6538952745849298,
"acc_stderr": 0.017011965266412077,
"acc_norm": 0.6538952745849298,
"acc_norm_stderr": 0.017011965266412077
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.43703703703703706,
"acc_stderr": 0.04284958639753399,
"acc_norm": 0.43703703703703706,
"acc_norm_stderr": 0.04284958639753399
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4595744680851064,
"acc_stderr": 0.032579014820998356,
"acc_norm": 0.4595744680851064,
"acc_norm_stderr": 0.032579014820998356
},
"harness|ko_mmlu_virology|5": {
"acc": 0.43373493975903615,
"acc_stderr": 0.03858158940685515,
"acc_norm": 0.43373493975903615,
"acc_norm_stderr": 0.03858158940685515
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6109324758842444,
"acc_stderr": 0.027690337536485376,
"acc_norm": 0.6109324758842444,
"acc_norm_stderr": 0.027690337536485376
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5695067264573991,
"acc_stderr": 0.033231973029429394,
"acc_norm": 0.5695067264573991,
"acc_norm_stderr": 0.033231973029429394
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6335877862595419,
"acc_stderr": 0.04225875451969638,
"acc_norm": 0.6335877862595419,
"acc_norm_stderr": 0.04225875451969638
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7070707070707071,
"acc_stderr": 0.032424979581788166,
"acc_norm": 0.7070707070707071,
"acc_norm_stderr": 0.032424979581788166
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.42758620689655175,
"acc_stderr": 0.041227371113703316,
"acc_norm": 0.42758620689655175,
"acc_norm_stderr": 0.041227371113703316
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.39215686274509803,
"acc_stderr": 0.048580835742663454,
"acc_norm": 0.39215686274509803,
"acc_norm_stderr": 0.048580835742663454
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6260504201680672,
"acc_stderr": 0.03142946637883708,
"acc_norm": 0.6260504201680672,
"acc_norm_stderr": 0.03142946637883708
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5666666666666667,
"acc_stderr": 0.0251246535258851,
"acc_norm": 0.5666666666666667,
"acc_norm_stderr": 0.0251246535258851
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6018518518518519,
"acc_stderr": 0.04732332615978814,
"acc_norm": 0.6018518518518519,
"acc_norm_stderr": 0.04732332615978814
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4039408866995074,
"acc_stderr": 0.03452453903822039,
"acc_norm": 0.4039408866995074,
"acc_norm_stderr": 0.03452453903822039
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5870967741935483,
"acc_stderr": 0.028009138125400384,
"acc_norm": 0.5870967741935483,
"acc_norm_stderr": 0.028009138125400384
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7521367521367521,
"acc_stderr": 0.028286324075564424,
"acc_norm": 0.7521367521367521,
"acc_norm_stderr": 0.028286324075564424
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5169811320754717,
"acc_stderr": 0.030755120364119898,
"acc_norm": 0.5169811320754717,
"acc_norm_stderr": 0.030755120364119898
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5727272727272728,
"acc_stderr": 0.04738198703545483,
"acc_norm": 0.5727272727272728,
"acc_norm_stderr": 0.04738198703545483
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.028742040903948482,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.028742040903948482
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2980132450331126,
"acc_stderr": 0.037345356767871984,
"acc_norm": 0.2980132450331126,
"acc_norm_stderr": 0.037345356767871984
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6716417910447762,
"acc_stderr": 0.033206858897443244,
"acc_norm": 0.6716417910447762,
"acc_norm_stderr": 0.033206858897443244
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.48554913294797686,
"acc_stderr": 0.03810871630454764,
"acc_norm": 0.48554913294797686,
"acc_norm_stderr": 0.03810871630454764
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.42063492063492064,
"acc_stderr": 0.025424835086924003,
"acc_norm": 0.42063492063492064,
"acc_norm_stderr": 0.025424835086924003
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5277777777777778,
"acc_stderr": 0.04174752578923185,
"acc_norm": 0.5277777777777778,
"acc_norm_stderr": 0.04174752578923185
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695237,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695237
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.523121387283237,
"acc_stderr": 0.026890297881303125,
"acc_norm": 0.523121387283237,
"acc_norm_stderr": 0.026890297881303125
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5214723926380368,
"acc_stderr": 0.03924746876751129,
"acc_norm": 0.5214723926380368,
"acc_norm_stderr": 0.03924746876751129
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5864197530864198,
"acc_stderr": 0.027402042040269966,
"acc_norm": 0.5864197530864198,
"acc_norm_stderr": 0.027402042040269966
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.7046632124352331,
"acc_stderr": 0.032922966391551414,
"acc_norm": 0.7046632124352331,
"acc_norm_stderr": 0.032922966391551414
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.47368421052631576,
"acc_stderr": 0.046970851366478626,
"acc_norm": 0.47368421052631576,
"acc_norm_stderr": 0.046970851366478626
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6770642201834862,
"acc_stderr": 0.02004811592341532,
"acc_norm": 0.6770642201834862,
"acc_norm_stderr": 0.02004811592341532
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.38095238095238093,
"acc_stderr": 0.043435254289490965,
"acc_norm": 0.38095238095238093,
"acc_norm_stderr": 0.043435254289490965
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5718954248366013,
"acc_stderr": 0.028332397483664278,
"acc_norm": 0.5718954248366013,
"acc_norm_stderr": 0.028332397483664278
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7024793388429752,
"acc_stderr": 0.04173349148083499,
"acc_norm": 0.7024793388429752,
"acc_norm_stderr": 0.04173349148083499
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5855263157894737,
"acc_stderr": 0.040089737857792046,
"acc_norm": 0.5855263157894737,
"acc_norm_stderr": 0.040089737857792046
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.5098039215686274,
"acc_stderr": 0.020223946005074312,
"acc_norm": 0.5098039215686274,
"acc_norm_stderr": 0.020223946005074312
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3546099290780142,
"acc_stderr": 0.02853865002887864,
"acc_norm": 0.3546099290780142,
"acc_norm_stderr": 0.02853865002887864
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.39285714285714285,
"acc_stderr": 0.04635550135609976,
"acc_norm": 0.39285714285714285,
"acc_norm_stderr": 0.04635550135609976
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.03409386946992699,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.03409386946992699
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.4,
"acc_stderr": 0.016384638410380823,
"acc_norm": 0.4,
"acc_norm_stderr": 0.016384638410380823
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4889705882352941,
"acc_stderr": 0.030365446477275668,
"acc_norm": 0.4889705882352941,
"acc_norm_stderr": 0.030365446477275668
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5918367346938775,
"acc_stderr": 0.03146465712827424,
"acc_norm": 0.5918367346938775,
"acc_norm_stderr": 0.03146465712827424
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7426160337552743,
"acc_stderr": 0.02845882099146031,
"acc_norm": 0.7426160337552743,
"acc_norm_stderr": 0.02845882099146031
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.43741851368970014,
"acc_stderr": 0.012669813464935719,
"acc_norm": 0.43741851368970014,
"acc_norm_stderr": 0.012669813464935719
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6274509803921569,
"acc_stderr": 0.03393388584958406,
"acc_norm": 0.6274509803921569,
"acc_norm_stderr": 0.03393388584958406
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.03681050869161549,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.03681050869161549
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.47980416156670747,
"mc1_stderr": 0.017489216849737043,
"mc2": 0.6674146470053609,
"mc2_stderr": 0.01573182499922601
},
"harness|ko_commongen_v2|2": {
"acc": 0.5312868949232585,
"acc_stderr": 0.017156666859785456,
"acc_norm": 0.5371900826446281,
"acc_norm_stderr": 0.0171427361176433
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Edentns/DataVortexS-10.7B-dpo-v1.11",
"model_sha": "7cde644083fce16dcd1666c536a4dd1438a3c0a3",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}