results/JONGYUN/DPO_Test_2/result_2024-02-21 04:41:16.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.46928327645051193,
"acc_stderr": 0.014583792546304037,
"acc_norm": 0.5255972696245734,
"acc_norm_stderr": 0.014592230885298967
},
"harness|ko_hellaswag|10": {
"acc": 0.4645488946425015,
"acc_stderr": 0.0049772234853420316,
"acc_norm": 0.6356303525194185,
"acc_norm_stderr": 0.00480269410620365
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6432748538011696,
"acc_stderr": 0.03674013002860954,
"acc_norm": 0.6432748538011696,
"acc_norm_stderr": 0.03674013002860954
},
"harness|ko_mmlu_management|5": {
"acc": 0.5922330097087378,
"acc_stderr": 0.048657775704107696,
"acc_norm": 0.5922330097087378,
"acc_norm_stderr": 0.048657775704107696
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.665389527458493,
"acc_stderr": 0.01687346864159216,
"acc_norm": 0.665389527458493,
"acc_norm_stderr": 0.01687346864159216
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.45925925925925926,
"acc_stderr": 0.04304979692464245,
"acc_norm": 0.45925925925925926,
"acc_norm_stderr": 0.04304979692464245
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252603,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252603
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4978723404255319,
"acc_stderr": 0.032685726586674915,
"acc_norm": 0.4978723404255319,
"acc_norm_stderr": 0.032685726586674915
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4819277108433735,
"acc_stderr": 0.03889951252827216,
"acc_norm": 0.4819277108433735,
"acc_norm_stderr": 0.03889951252827216
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6463022508038585,
"acc_stderr": 0.02715520810320088,
"acc_norm": 0.6463022508038585,
"acc_norm_stderr": 0.02715520810320088
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.6233183856502242,
"acc_stderr": 0.032521134899291884,
"acc_norm": 0.6233183856502242,
"acc_norm_stderr": 0.032521134899291884
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6564885496183206,
"acc_stderr": 0.041649760719448786,
"acc_norm": 0.6564885496183206,
"acc_norm_stderr": 0.041649760719448786
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7171717171717171,
"acc_stderr": 0.03208779558786752,
"acc_norm": 0.7171717171717171,
"acc_norm_stderr": 0.03208779558786752
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5103448275862069,
"acc_stderr": 0.04165774775728763,
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728763
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.30392156862745096,
"acc_stderr": 0.045766654032077615,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.045766654032077615
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5882352941176471,
"acc_stderr": 0.031968769891957786,
"acc_norm": 0.5882352941176471,
"acc_norm_stderr": 0.031968769891957786
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5102564102564102,
"acc_stderr": 0.025345672221942374,
"acc_norm": 0.5102564102564102,
"acc_norm_stderr": 0.025345672221942374
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.04557239513497751,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.04557239513497751
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4433497536945813,
"acc_stderr": 0.03495334582162934,
"acc_norm": 0.4433497536945813,
"acc_norm_stderr": 0.03495334582162934
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5741935483870968,
"acc_stderr": 0.028129112709165904,
"acc_norm": 0.5741935483870968,
"acc_norm_stderr": 0.028129112709165904
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.8076923076923077,
"acc_stderr": 0.025819233256483706,
"acc_norm": 0.8076923076923077,
"acc_norm_stderr": 0.025819233256483706
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5283018867924528,
"acc_stderr": 0.030723535249006107,
"acc_norm": 0.5283018867924528,
"acc_norm_stderr": 0.030723535249006107
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.6272727272727273,
"acc_stderr": 0.04631381319425465,
"acc_norm": 0.6272727272727273,
"acc_norm_stderr": 0.04631381319425465
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.028037929969114986,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.028037929969114986
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.038615575462551684,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.038615575462551684
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.7263681592039801,
"acc_stderr": 0.03152439186555404,
"acc_norm": 0.7263681592039801,
"acc_norm_stderr": 0.03152439186555404
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5028901734104047,
"acc_stderr": 0.038124005659748335,
"acc_norm": 0.5028901734104047,
"acc_norm_stderr": 0.038124005659748335
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.41534391534391535,
"acc_stderr": 0.025379524910778405,
"acc_norm": 0.41534391534391535,
"acc_norm_stderr": 0.025379524910778405
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5347222222222222,
"acc_stderr": 0.04171115858181618,
"acc_norm": 0.5347222222222222,
"acc_norm_stderr": 0.04171115858181618
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.615606936416185,
"acc_stderr": 0.026189666966272035,
"acc_norm": 0.615606936416185,
"acc_norm_stderr": 0.026189666966272035
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5398773006134969,
"acc_stderr": 0.03915857291436972,
"acc_norm": 0.5398773006134969,
"acc_norm_stderr": 0.03915857291436972
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.6141975308641975,
"acc_stderr": 0.027085401226132143,
"acc_norm": 0.6141975308641975,
"acc_norm_stderr": 0.027085401226132143
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6994818652849741,
"acc_stderr": 0.0330881859441575,
"acc_norm": 0.6994818652849741,
"acc_norm_stderr": 0.0330881859441575
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.43859649122807015,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.43859649122807015,
"acc_norm_stderr": 0.04668000738510455
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6752293577981652,
"acc_stderr": 0.020077729109310324,
"acc_norm": 0.6752293577981652,
"acc_norm_stderr": 0.020077729109310324
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04360314860077459,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04360314860077459
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5980392156862745,
"acc_stderr": 0.02807415894760066,
"acc_norm": 0.5980392156862745,
"acc_norm_stderr": 0.02807415894760066
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.61,
"acc_stderr": 0.049020713000019756,
"acc_norm": 0.61,
"acc_norm_stderr": 0.049020713000019756
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.040655781409087044,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.040655781409087044
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5855263157894737,
"acc_stderr": 0.04008973785779205,
"acc_norm": 0.5855263157894737,
"acc_norm_stderr": 0.04008973785779205
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.020192808271433788,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.020192808271433788
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.39361702127659576,
"acc_stderr": 0.02914454478159614,
"acc_norm": 0.39361702127659576,
"acc_norm_stderr": 0.02914454478159614
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.3482142857142857,
"acc_stderr": 0.04521829902833586,
"acc_norm": 0.3482142857142857,
"acc_norm_stderr": 0.04521829902833586
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4537037037037037,
"acc_stderr": 0.03395322726375797,
"acc_norm": 0.4537037037037037,
"acc_norm_stderr": 0.03395322726375797
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2245810055865922,
"acc_stderr": 0.013956803666544637,
"acc_norm": 0.2245810055865922,
"acc_norm_stderr": 0.013956803666544637
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4632352941176471,
"acc_stderr": 0.03029061918048569,
"acc_norm": 0.4632352941176471,
"acc_norm_stderr": 0.03029061918048569
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.689795918367347,
"acc_stderr": 0.02961345987248438,
"acc_norm": 0.689795918367347,
"acc_norm_stderr": 0.02961345987248438
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7679324894514767,
"acc_stderr": 0.027479744550808507,
"acc_norm": 0.7679324894514767,
"acc_norm_stderr": 0.027479744550808507
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.39765319426336376,
"acc_stderr": 0.012499840347460643,
"acc_norm": 0.39765319426336376,
"acc_norm_stderr": 0.012499840347460643
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.7303921568627451,
"acc_stderr": 0.03114557065948678,
"acc_norm": 0.7303921568627451,
"acc_norm_stderr": 0.03114557065948678
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.7151515151515152,
"acc_stderr": 0.03524390844511781,
"acc_norm": 0.7151515151515152,
"acc_norm_stderr": 0.03524390844511781
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3329253365973072,
"mc1_stderr": 0.016497402382012052,
"mc2": 0.4947134696523839,
"mc2_stderr": 0.01547234495228754
},
"harness|ko_commongen_v2|2": {
"acc": 0.5997638724911453,
"acc_stderr": 0.016844693510505035,
"acc_norm": 0.6186540731995277,
"acc_norm_stderr": 0.016699301768828077
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "JONGYUN/DPO_Test_2",
"model_sha": "d7cabe3ab37f15fe28f43bac6c63b94f4da561e8",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
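
A minimal sketch of how one might load this results file and compute an unweighted mean score across the harness tasks. This assumes Python 3 and that the file has been saved locally; the path below is hypothetical, so adjust it to your own copy. Most tasks report acc/acc_norm, while ko_truthfulqa_mc reports mc1/mc2, which the sketch folds in via mc2.

```python
import json

# Hypothetical local path; point this at wherever you saved the file.
PATH = "result_2024-02-21 04:41:16.json"

with open(PATH, encoding="utf-8") as f:
    blob = json.load(f)

scores = []
for task, metrics in blob["results"].items():
    # Prefer acc_norm where present; ko_truthfulqa_mc has no acc/acc_norm,
    # so fall back to its mc2 score instead.
    score = metrics.get("acc_norm", metrics.get("mc2"))
    if score is not None:
        scores.append(score)

print(f"model: {blob['config_general']['model_name']}")
print(f"tasks: {len(scores)}")
print(f"mean acc_norm/mc2: {sum(scores) / len(scores):.4f}")
```

Note that this is a plain unweighted mean over per-task scores for illustration only; the actual leaderboard aggregation (e.g. which metric it averages per task, or any task weighting) is not specified in this file.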