{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3242320819112628,
"acc_stderr": 0.01367881039951882,
"acc_norm": 0.37372013651877134,
"acc_norm_stderr": 0.014137708601759077
},
"harness|ko_hellaswag|10": {
"acc": 0.3664608643696475,
"acc_stderr": 0.004808526802718588,
"acc_norm": 0.47122087233618803,
"acc_norm_stderr": 0.004981509099276354
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4327485380116959,
"acc_stderr": 0.03799978644370607,
"acc_norm": 0.4327485380116959,
"acc_norm_stderr": 0.03799978644370607
},
"harness|ko_mmlu_management|5": {
"acc": 0.5728155339805825,
"acc_stderr": 0.04897957737781168,
"acc_norm": 0.5728155339805825,
"acc_norm_stderr": 0.04897957737781168
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.45977011494252873,
"acc_stderr": 0.01782199409693354,
"acc_norm": 0.45977011494252873,
"acc_norm_stderr": 0.01782199409693354
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.04094376269996794,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.04094376269996794
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.37872340425531914,
"acc_stderr": 0.03170995606040655,
"acc_norm": 0.37872340425531914,
"acc_norm_stderr": 0.03170995606040655
},
"harness|ko_mmlu_virology|5": {
"acc": 0.39156626506024095,
"acc_stderr": 0.03799857454479636,
"acc_norm": 0.39156626506024095,
"acc_norm_stderr": 0.03799857454479636
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4919614147909968,
"acc_stderr": 0.028394421370984545,
"acc_norm": 0.4919614147909968,
"acc_norm_stderr": 0.028394421370984545
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.48878923766816146,
"acc_stderr": 0.033549366530984746,
"acc_norm": 0.48878923766816146,
"acc_norm_stderr": 0.033549366530984746
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.37404580152671757,
"acc_stderr": 0.042438692422305246,
"acc_norm": 0.37404580152671757,
"acc_norm_stderr": 0.042438692422305246
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5202020202020202,
"acc_stderr": 0.03559443565563918,
"acc_norm": 0.5202020202020202,
"acc_norm_stderr": 0.03559443565563918
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.43448275862068964,
"acc_stderr": 0.041307408795554966,
"acc_norm": 0.43448275862068964,
"acc_norm_stderr": 0.041307408795554966
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.04488482852329017
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4789915966386555,
"acc_stderr": 0.032449808499900284,
"acc_norm": 0.4789915966386555,
"acc_norm_stderr": 0.032449808499900284
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.43846153846153846,
"acc_stderr": 0.025158266016868557,
"acc_norm": 0.43846153846153846,
"acc_norm_stderr": 0.025158266016868557
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.04803752235190193,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.04803752235190193
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3891625615763547,
"acc_stderr": 0.03430462416103872,
"acc_norm": 0.3891625615763547,
"acc_norm_stderr": 0.03430462416103872
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.45483870967741935,
"acc_stderr": 0.028327743091561063,
"acc_norm": 0.45483870967741935,
"acc_norm_stderr": 0.028327743091561063
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.030882736974138663,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.030882736974138663
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.45660377358490567,
"acc_stderr": 0.03065674869673943,
"acc_norm": 0.45660377358490567,
"acc_norm_stderr": 0.03065674869673943
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5,
"acc_stderr": 0.04789131426105757,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04789131426105757
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.32222222222222224,
"acc_stderr": 0.0284934650910286,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.0284934650910286
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2582781456953642,
"acc_stderr": 0.035737053147634576,
"acc_norm": 0.2582781456953642,
"acc_norm_stderr": 0.035737053147634576
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5920398009950248,
"acc_stderr": 0.03475116365194092,
"acc_norm": 0.5920398009950248,
"acc_norm_stderr": 0.03475116365194092
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3699421965317919,
"acc_stderr": 0.036812296333943194,
"acc_norm": 0.3699421965317919,
"acc_norm_stderr": 0.036812296333943194
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3386243386243386,
"acc_stderr": 0.024373197867983056,
"acc_norm": 0.3386243386243386,
"acc_norm_stderr": 0.024373197867983056
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.040166600304512336,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.040166600304512336
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.48265895953757226,
"acc_stderr": 0.026902900458666647,
"acc_norm": 0.48265895953757226,
"acc_norm_stderr": 0.026902900458666647
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.44785276073619634,
"acc_stderr": 0.03906947479456602,
"acc_norm": 0.44785276073619634,
"acc_norm_stderr": 0.03906947479456602
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.42592592592592593,
"acc_stderr": 0.02751374728437942,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.02751374728437942
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.47668393782383417,
"acc_stderr": 0.03604513672442206,
"acc_norm": 0.47668393782383417,
"acc_norm_stderr": 0.03604513672442206
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.30701754385964913,
"acc_stderr": 0.0433913832257986,
"acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.0433913832257986
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.47155963302752296,
"acc_stderr": 0.021402615697348044,
"acc_norm": 0.47155963302752296,
"acc_norm_stderr": 0.021402615697348044
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.0442626668137991,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.0442626668137991
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.477124183006536,
"acc_stderr": 0.028599936776089782,
"acc_norm": 0.477124183006536,
"acc_norm_stderr": 0.028599936776089782
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.44,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.44,
"acc_norm_stderr": 0.049888765156985884
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.043913262867240704,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.043913262867240704
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.40131578947368424,
"acc_stderr": 0.03988903703336285,
"acc_norm": 0.40131578947368424,
"acc_norm_stderr": 0.03988903703336285
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.40032679738562094,
"acc_stderr": 0.019821843688271775,
"acc_norm": 0.40032679738562094,
"acc_norm_stderr": 0.019821843688271775
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.32269503546099293,
"acc_stderr": 0.02788913930053479,
"acc_norm": 0.32269503546099293,
"acc_norm_stderr": 0.02788913930053479
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.04718471485219588,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.04718471485219588
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.375,
"acc_stderr": 0.033016908987210894,
"acc_norm": 0.375,
"acc_norm_stderr": 0.033016908987210894
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.288268156424581,
"acc_stderr": 0.015149132860209422,
"acc_norm": 0.288268156424581,
"acc_norm_stderr": 0.015149132860209422
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3161764705882353,
"acc_stderr": 0.028245687391462916,
"acc_norm": 0.3161764705882353,
"acc_norm_stderr": 0.028245687391462916
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.49387755102040815,
"acc_stderr": 0.03200682020163909,
"acc_norm": 0.49387755102040815,
"acc_norm_stderr": 0.03200682020163909
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5822784810126582,
"acc_stderr": 0.032103530322412685,
"acc_norm": 0.5822784810126582,
"acc_norm_stderr": 0.032103530322412685
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3161668839634941,
"acc_stderr": 0.011875780894386578,
"acc_norm": 0.3161668839634941,
"acc_norm_stderr": 0.011875780894386578
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4068627450980392,
"acc_stderr": 0.03447891136353382,
"acc_norm": 0.4068627450980392,
"acc_norm_stderr": 0.03447891136353382
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.41818181818181815,
"acc_stderr": 0.03851716319398396,
"acc_norm": 0.41818181818181815,
"acc_norm_stderr": 0.03851716319398396
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3023255813953488,
"mc1_stderr": 0.016077509266133036,
"mc2": 0.4868855375869235,
"mc2_stderr": 0.015699074117298418
},
"harness|ko_commongen_v2|2": {
"acc": 0.3825265643447462,
"acc_stderr": 0.016709165387228817,
"acc_norm": 0.4025974025974026,
"acc_norm_stderr": 0.01686102048640778
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "GritLM/GritLM-7B-KTO",
"model_sha": "b5c48669508c1de18c698460c187f64e90e7df44",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}