{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.4974402730375427,
"acc_stderr": 0.014611199329843784,
"acc_norm": 0.5554607508532423,
"acc_norm_stderr": 0.01452122640562708
},
"harness|ko_hellaswag|10": {
"acc": 0.45937064329814775,
"acc_stderr": 0.004973280417705513,
"acc_norm": 0.6339374626568413,
"acc_norm_stderr": 0.004807423343224586
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6374269005847953,
"acc_stderr": 0.0368713061556206,
"acc_norm": 0.6374269005847953,
"acc_norm_stderr": 0.0368713061556206
},
"harness|ko_mmlu_management|5": {
"acc": 0.6601941747572816,
"acc_stderr": 0.04689765937278135,
"acc_norm": 0.6601941747572816,
"acc_norm_stderr": 0.04689765937278135
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.70242656449553,
"acc_stderr": 0.01634911191290943,
"acc_norm": 0.70242656449553,
"acc_norm_stderr": 0.01634911191290943
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4740740740740741,
"acc_stderr": 0.04313531696750573,
"acc_norm": 0.4740740740740741,
"acc_norm_stderr": 0.04313531696750573
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4765957446808511,
"acc_stderr": 0.03265019475033583,
"acc_norm": 0.4765957446808511,
"acc_norm_stderr": 0.03265019475033583
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4457831325301205,
"acc_stderr": 0.03869543323472101,
"acc_norm": 0.4457831325301205,
"acc_norm_stderr": 0.03869543323472101
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5691318327974276,
"acc_stderr": 0.028125340983972714,
"acc_norm": 0.5691318327974276,
"acc_norm_stderr": 0.028125340983972714
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5515695067264574,
"acc_stderr": 0.03337883736255098,
"acc_norm": 0.5515695067264574,
"acc_norm_stderr": 0.03337883736255098
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.549618320610687,
"acc_stderr": 0.04363643698524779,
"acc_norm": 0.549618320610687,
"acc_norm_stderr": 0.04363643698524779
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.48,
"acc_stderr": 0.05021167315686779,
"acc_norm": 0.48,
"acc_norm_stderr": 0.05021167315686779
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6767676767676768,
"acc_stderr": 0.033322999210706444,
"acc_norm": 0.6767676767676768,
"acc_norm_stderr": 0.033322999210706444
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.496551724137931,
"acc_stderr": 0.041665675771015785,
"acc_norm": 0.496551724137931,
"acc_norm_stderr": 0.041665675771015785
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.04389869956808778,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.04389869956808778
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5,
"acc_stderr": 0.032478490123081544,
"acc_norm": 0.5,
"acc_norm_stderr": 0.032478490123081544
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5538461538461539,
"acc_stderr": 0.025203571773028323,
"acc_norm": 0.5538461538461539,
"acc_norm_stderr": 0.025203571773028323
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5833333333333334,
"acc_stderr": 0.04766075165356461,
"acc_norm": 0.5833333333333334,
"acc_norm_stderr": 0.04766075165356461
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.39901477832512317,
"acc_stderr": 0.03445487686264715,
"acc_norm": 0.39901477832512317,
"acc_norm_stderr": 0.03445487686264715
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.6129032258064516,
"acc_stderr": 0.027709359675032495,
"acc_norm": 0.6129032258064516,
"acc_norm_stderr": 0.027709359675032495
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.8034188034188035,
"acc_stderr": 0.02603538609895129,
"acc_norm": 0.8034188034188035,
"acc_norm_stderr": 0.02603538609895129
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5320754716981132,
"acc_stderr": 0.03070948699255654,
"acc_norm": 0.5320754716981132,
"acc_norm_stderr": 0.03070948699255654
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5545454545454546,
"acc_stderr": 0.047605488214603246,
"acc_norm": 0.5545454545454546,
"acc_norm_stderr": 0.047605488214603246
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3814814814814815,
"acc_stderr": 0.029616718927497596,
"acc_norm": 0.3814814814814815,
"acc_norm_stderr": 0.029616718927497596
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.4105960264900662,
"acc_stderr": 0.04016689594849929,
"acc_norm": 0.4105960264900662,
"acc_norm_stderr": 0.04016689594849929
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.681592039800995,
"acc_stderr": 0.03294118479054096,
"acc_norm": 0.681592039800995,
"acc_norm_stderr": 0.03294118479054096
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4508670520231214,
"acc_stderr": 0.037940126746970296,
"acc_norm": 0.4508670520231214,
"acc_norm_stderr": 0.037940126746970296
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.36243386243386244,
"acc_stderr": 0.02475747390275206,
"acc_norm": 0.36243386243386244,
"acc_norm_stderr": 0.02475747390275206
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5069444444444444,
"acc_stderr": 0.04180806750294938,
"acc_norm": 0.5069444444444444,
"acc_norm_stderr": 0.04180806750294938
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.76,
"acc_norm_stderr": 0.04292346959909283
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5549132947976878,
"acc_stderr": 0.026756255129663765,
"acc_norm": 0.5549132947976878,
"acc_norm_stderr": 0.026756255129663765
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5337423312883436,
"acc_stderr": 0.039194155450484096,
"acc_norm": 0.5337423312883436,
"acc_norm_stderr": 0.039194155450484096
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5370370370370371,
"acc_stderr": 0.027744313443376536,
"acc_norm": 0.5370370370370371,
"acc_norm_stderr": 0.027744313443376536
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6580310880829016,
"acc_stderr": 0.03423465100104283,
"acc_norm": 0.6580310880829016,
"acc_norm_stderr": 0.03423465100104283
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.37719298245614036,
"acc_stderr": 0.04559522141958215,
"acc_norm": 0.37719298245614036,
"acc_norm_stderr": 0.04559522141958215
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.636697247706422,
"acc_stderr": 0.020620603919625807,
"acc_norm": 0.636697247706422,
"acc_norm_stderr": 0.020620603919625807
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.373015873015873,
"acc_stderr": 0.04325506042017086,
"acc_norm": 0.373015873015873,
"acc_norm_stderr": 0.04325506042017086
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.02858034106513829,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.02858034106513829
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7024793388429752,
"acc_stderr": 0.04173349148083499,
"acc_norm": 0.7024793388429752,
"acc_norm_stderr": 0.04173349148083499
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5789473684210527,
"acc_stderr": 0.040179012759817494,
"acc_norm": 0.5789473684210527,
"acc_norm_stderr": 0.040179012759817494
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.46895424836601307,
"acc_stderr": 0.020188804456361887,
"acc_norm": 0.46895424836601307,
"acc_norm_stderr": 0.020188804456361887
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.32978723404255317,
"acc_stderr": 0.0280459469420424,
"acc_norm": 0.32978723404255317,
"acc_norm_stderr": 0.0280459469420424
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.04547960999764376,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04547960999764376
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.03350991604696044,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.03350991604696044
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2346368715083799,
"acc_stderr": 0.014173044098303675,
"acc_norm": 0.2346368715083799,
"acc_norm_stderr": 0.014173044098303675
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4227941176470588,
"acc_stderr": 0.030008562845003466,
"acc_norm": 0.4227941176470588,
"acc_norm_stderr": 0.030008562845003466
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5877551020408164,
"acc_stderr": 0.031512360446742695,
"acc_norm": 0.5877551020408164,
"acc_norm_stderr": 0.031512360446742695
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7046413502109705,
"acc_stderr": 0.029696338713422882,
"acc_norm": 0.7046413502109705,
"acc_norm_stderr": 0.029696338713422882
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.35658409387222945,
"acc_stderr": 0.01223364298927389,
"acc_norm": 0.35658409387222945,
"acc_norm_stderr": 0.01223364298927389
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6470588235294118,
"acc_stderr": 0.03354092437591519,
"acc_norm": 0.6470588235294118,
"acc_norm_stderr": 0.03354092437591519
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6181818181818182,
"acc_stderr": 0.03793713171165633,
"acc_norm": 0.6181818181818182,
"acc_norm_stderr": 0.03793713171165633
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.31701346389228885,
"mc1_stderr": 0.016289203374403392,
"mc2": 0.48234441684091955,
"mc2_stderr": 0.015365663323313775
},
"harness|ko_commongen_v2|2": {
"acc": 0.5938606847697757,
"acc_stderr": 0.016884749503191396,
"acc_norm": 0.6033057851239669,
"acc_norm_stderr": 0.016819438642971404
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Edentns/DataVortexS-10.7B-dpo-v1.7",
"model_sha": "85af13a7e6002cee79c1b0be9cd0c93fd18d723e",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}