results/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v5.0/result_2024-03-05 02:04:33.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2815699658703072,
"acc_stderr": 0.013143376735009015,
"acc_norm": 0.33532423208191126,
"acc_norm_stderr": 0.013796182947785566
},
"harness|ko_hellaswag|10": {
"acc": 0.360884285998805,
"acc_stderr": 0.0047927552358235275,
"acc_norm": 0.45956980681139215,
"acc_norm_stderr": 0.004973442060741621
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.22807017543859648,
"acc_stderr": 0.03218093795602357,
"acc_norm": 0.22807017543859648,
"acc_norm_stderr": 0.03218093795602357
},
"harness|ko_mmlu_management|5": {
"acc": 0.17475728155339806,
"acc_stderr": 0.0376017800602662,
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.0376017800602662
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.25798212005108556,
"acc_stderr": 0.01564583018834895,
"acc_norm": 0.25798212005108556,
"acc_norm_stderr": 0.01564583018834895
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.03972552884785137,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.03972552884785137
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.28085106382978725,
"acc_stderr": 0.029379170464124818,
"acc_norm": 0.28085106382978725,
"acc_norm_stderr": 0.029379170464124818
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3373493975903614,
"acc_stderr": 0.0368078369072758,
"acc_norm": 0.3373493975903614,
"acc_norm_stderr": 0.0368078369072758
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.2765273311897106,
"acc_stderr": 0.02540383297817961,
"acc_norm": 0.2765273311897106,
"acc_norm_stderr": 0.02540383297817961
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.25112107623318386,
"acc_stderr": 0.0291052208332246,
"acc_norm": 0.25112107623318386,
"acc_norm_stderr": 0.0291052208332246
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.25190839694656486,
"acc_stderr": 0.03807387116306086,
"acc_norm": 0.25190839694656486,
"acc_norm_stderr": 0.03807387116306086
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.18686868686868688,
"acc_stderr": 0.027772533334218974,
"acc_norm": 0.18686868686868688,
"acc_norm_stderr": 0.027772533334218974
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.296551724137931,
"acc_stderr": 0.038061426873099935,
"acc_norm": 0.296551724137931,
"acc_norm_stderr": 0.038061426873099935
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.0438986995680878,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.0438986995680878
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.25630252100840334,
"acc_stderr": 0.028359620870533946,
"acc_norm": 0.25630252100840334,
"acc_norm_stderr": 0.028359620870533946
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2358974358974359,
"acc_stderr": 0.021525965407408726,
"acc_norm": 0.2358974358974359,
"acc_norm_stderr": 0.021525965407408726
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.04414343666854932,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.04414343666854932
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.17733990147783252,
"acc_stderr": 0.026874337276808342,
"acc_norm": 0.17733990147783252,
"acc_norm_stderr": 0.026874337276808342
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2645161290322581,
"acc_stderr": 0.025091892378859275,
"acc_norm": 0.2645161290322581,
"acc_norm_stderr": 0.025091892378859275
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.24358974358974358,
"acc_stderr": 0.028120966503914404,
"acc_norm": 0.24358974358974358,
"acc_norm_stderr": 0.028120966503914404
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.24150943396226415,
"acc_stderr": 0.026341480371118352,
"acc_norm": 0.24150943396226415,
"acc_norm_stderr": 0.026341480371118352
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2909090909090909,
"acc_stderr": 0.04350271442923243,
"acc_norm": 0.2909090909090909,
"acc_norm_stderr": 0.04350271442923243
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.23333333333333334,
"acc_stderr": 0.025787874220959336,
"acc_norm": 0.23333333333333334,
"acc_norm_stderr": 0.025787874220959336
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.25870646766169153,
"acc_stderr": 0.03096590312357304,
"acc_norm": 0.25870646766169153,
"acc_norm_stderr": 0.03096590312357304
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.19653179190751446,
"acc_stderr": 0.030299574664788137,
"acc_norm": 0.19653179190751446,
"acc_norm_stderr": 0.030299574664788137
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24338624338624337,
"acc_stderr": 0.022101128787415415,
"acc_norm": 0.24338624338624337,
"acc_norm_stderr": 0.022101128787415415
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.25,
"acc_stderr": 0.03621034121889507,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03621034121889507
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.22,
"acc_norm_stderr": 0.0416333199893227
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.21965317919075145,
"acc_stderr": 0.022289638852617904,
"acc_norm": 0.21965317919075145,
"acc_norm_stderr": 0.022289638852617904
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.2331288343558282,
"acc_stderr": 0.0332201579577674,
"acc_norm": 0.2331288343558282,
"acc_norm_stderr": 0.0332201579577674
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2716049382716049,
"acc_stderr": 0.02474862449053737,
"acc_norm": 0.2716049382716049,
"acc_norm_stderr": 0.02474862449053737
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.26424870466321243,
"acc_stderr": 0.03182155050916646,
"acc_norm": 0.26424870466321243,
"acc_norm_stderr": 0.03182155050916646
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.04303684033537316,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.04303684033537316
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.24036697247706423,
"acc_stderr": 0.01832060732096407,
"acc_norm": 0.24036697247706423,
"acc_norm_stderr": 0.01832060732096407
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.040061680838488774,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.040061680838488774
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.02564686309713791,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.02564686309713791
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.36363636363636365,
"acc_stderr": 0.04391326286724071,
"acc_norm": 0.36363636363636365,
"acc_norm_stderr": 0.04391326286724071
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.19736842105263158,
"acc_stderr": 0.03238981601699397,
"acc_norm": 0.19736842105263158,
"acc_norm_stderr": 0.03238981601699397
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.24019607843137256,
"acc_stderr": 0.017282760695167432,
"acc_norm": 0.24019607843137256,
"acc_norm_stderr": 0.017282760695167432
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.26595744680851063,
"acc_stderr": 0.02635806569888059,
"acc_norm": 0.26595744680851063,
"acc_norm_stderr": 0.02635806569888059
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952687,
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.04059867246952687
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3287037037037037,
"acc_stderr": 0.03203614084670058,
"acc_norm": 0.3287037037037037,
"acc_norm_stderr": 0.03203614084670058
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.27262569832402234,
"acc_stderr": 0.014893391735249608,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249608
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4338235294117647,
"acc_stderr": 0.03010563657001664,
"acc_norm": 0.4338235294117647,
"acc_norm_stderr": 0.03010563657001664
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2,
"acc_stderr": 0.025607375986579164,
"acc_norm": 0.2,
"acc_norm_stderr": 0.025607375986579164
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.26582278481012656,
"acc_stderr": 0.028756799629658342,
"acc_norm": 0.26582278481012656,
"acc_norm_stderr": 0.028756799629658342
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.24185136897001303,
"acc_stderr": 0.010936550813827065,
"acc_norm": 0.24185136897001303,
"acc_norm_stderr": 0.010936550813827065
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.24019607843137256,
"acc_stderr": 0.02998373305591362,
"acc_norm": 0.24019607843137256,
"acc_norm_stderr": 0.02998373305591362
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2606060606060606,
"acc_stderr": 0.03427743175816524,
"acc_norm": 0.2606060606060606,
"acc_norm_stderr": 0.03427743175816524
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3072215422276622,
"mc1_stderr": 0.01615020132132301,
"mc2": 0.4406215488693316,
"mc2_stderr": 0.014875277546161027
},
"harness|ko_commongen_v2|2": {
"acc": 0.27036599763872493,
"acc_stderr": 0.015270152942068405,
"acc_norm": 0.33293978748524206,
"acc_norm_stderr": 0.016202431208373797
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v5.0",
"model_sha": "3ac945231c72cd9d5edda7cf0121907ad11198e9",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}