{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2226962457337884,
"acc_stderr": 0.012158314774829919,
"acc_norm": 0.2960750853242321,
"acc_norm_stderr": 0.013340916085246254
},
"harness|ko_hellaswag|10": {
"acc": 0.2795259908384784,
"acc_stderr": 0.004478491697891243,
"acc_norm": 0.30870344552877915,
"acc_norm_stderr": 0.004610143575553467
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.39766081871345027,
"acc_stderr": 0.03753638955761691,
"acc_norm": 0.39766081871345027,
"acc_norm_stderr": 0.03753638955761691
},
"harness|ko_mmlu_management|5": {
"acc": 0.34951456310679613,
"acc_stderr": 0.04721188506097173,
"acc_norm": 0.34951456310679613,
"acc_norm_stderr": 0.04721188506097173
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.36909323116219667,
"acc_stderr": 0.01725628310912461,
"acc_norm": 0.36909323116219667,
"acc_norm_stderr": 0.01725628310912461
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04072314811876837,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04072314811876837
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816508,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816508
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3446808510638298,
"acc_stderr": 0.031068985963122155,
"acc_norm": 0.3446808510638298,
"acc_norm_stderr": 0.031068985963122155
},
"harness|ko_mmlu_virology|5": {
"acc": 0.35542168674698793,
"acc_stderr": 0.03726214354322415,
"acc_norm": 0.35542168674698793,
"acc_norm_stderr": 0.03726214354322415
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.34726688102893893,
"acc_stderr": 0.027040745502307336,
"acc_norm": 0.34726688102893893,
"acc_norm_stderr": 0.027040745502307336
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.34977578475336324,
"acc_stderr": 0.03200736719484503,
"acc_norm": 0.34977578475336324,
"acc_norm_stderr": 0.03200736719484503
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2900763358778626,
"acc_stderr": 0.03980066246467766,
"acc_norm": 0.2900763358778626,
"acc_norm_stderr": 0.03980066246467766
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.31313131313131315,
"acc_stderr": 0.03304205087813652,
"acc_norm": 0.31313131313131315,
"acc_norm_stderr": 0.03304205087813652
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3103448275862069,
"acc_stderr": 0.03855289616378948,
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.03855289616378948
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.20588235294117646,
"acc_stderr": 0.04023382273617746,
"acc_norm": 0.20588235294117646,
"acc_norm_stderr": 0.04023382273617746
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.28991596638655465,
"acc_stderr": 0.02947248583313609,
"acc_norm": 0.28991596638655465,
"acc_norm_stderr": 0.02947248583313609
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.3230769230769231,
"acc_stderr": 0.023710888501970565,
"acc_norm": 0.3230769230769231,
"acc_norm_stderr": 0.023710888501970565
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.39814814814814814,
"acc_stderr": 0.04732332615978814,
"acc_norm": 0.39814814814814814,
"acc_norm_stderr": 0.04732332615978814
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.22167487684729065,
"acc_stderr": 0.029225575892489614,
"acc_norm": 0.22167487684729065,
"acc_norm_stderr": 0.029225575892489614
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.34838709677419355,
"acc_stderr": 0.02710482632810094,
"acc_norm": 0.34838709677419355,
"acc_norm_stderr": 0.02710482632810094
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.5128205128205128,
"acc_stderr": 0.03274531938842351,
"acc_norm": 0.5128205128205128,
"acc_norm_stderr": 0.03274531938842351
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3169811320754717,
"acc_stderr": 0.028637235639800928,
"acc_norm": 0.3169811320754717,
"acc_norm_stderr": 0.028637235639800928
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.43636363636363634,
"acc_stderr": 0.04750185058907297,
"acc_norm": 0.43636363636363634,
"acc_norm_stderr": 0.04750185058907297
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.026067159222275794,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.026067159222275794
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.271523178807947,
"acc_stderr": 0.036313298039696525,
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.036313298039696525
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.43781094527363185,
"acc_stderr": 0.0350808011219984,
"acc_norm": 0.43781094527363185,
"acc_norm_stderr": 0.0350808011219984
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3236994219653179,
"acc_stderr": 0.0356760379963917,
"acc_norm": 0.3236994219653179,
"acc_norm_stderr": 0.0356760379963917
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.30158730158730157,
"acc_stderr": 0.023636975996101813,
"acc_norm": 0.30158730158730157,
"acc_norm_stderr": 0.023636975996101813
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3125,
"acc_stderr": 0.038760854559127644,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.038760854559127644
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.33815028901734107,
"acc_stderr": 0.02546977014940017,
"acc_norm": 0.33815028901734107,
"acc_norm_stderr": 0.02546977014940017
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3558282208588957,
"acc_stderr": 0.03761521380046734,
"acc_norm": 0.3558282208588957,
"acc_norm_stderr": 0.03761521380046734
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3271604938271605,
"acc_stderr": 0.02610567386140981,
"acc_norm": 0.3271604938271605,
"acc_norm_stderr": 0.02610567386140981
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.35233160621761656,
"acc_stderr": 0.03447478286414358,
"acc_norm": 0.35233160621761656,
"acc_norm_stderr": 0.03447478286414358
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.04303684033537315,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.04303684033537315
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3798165137614679,
"acc_stderr": 0.020808825617866244,
"acc_norm": 0.3798165137614679,
"acc_norm_stderr": 0.020808825617866244
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.29365079365079366,
"acc_stderr": 0.040735243221471255,
"acc_norm": 0.29365079365079366,
"acc_norm_stderr": 0.040735243221471255
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3202614379084967,
"acc_stderr": 0.026716118380156837,
"acc_norm": 0.3202614379084967,
"acc_norm_stderr": 0.026716118380156837
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.45454545454545453,
"acc_stderr": 0.045454545454545456,
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.045454545454545456
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.2894736842105263,
"acc_stderr": 0.03690677986137283,
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.03690677986137283
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3202614379084967,
"acc_stderr": 0.01887568293806944,
"acc_norm": 0.3202614379084967,
"acc_norm_stderr": 0.01887568293806944
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.30141843971631205,
"acc_stderr": 0.02737412888263115,
"acc_norm": 0.30141843971631205,
"acc_norm_stderr": 0.02737412888263115
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.3482142857142857,
"acc_stderr": 0.04521829902833585,
"acc_norm": 0.3482142857142857,
"acc_norm_stderr": 0.04521829902833585
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.20833333333333334,
"acc_stderr": 0.027696910713093933,
"acc_norm": 0.20833333333333334,
"acc_norm_stderr": 0.027696910713093933
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2569832402234637,
"acc_stderr": 0.014614465821966351,
"acc_norm": 0.2569832402234637,
"acc_norm_stderr": 0.014614465821966351
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.029029422815681404,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.029029422815681404
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.22040816326530613,
"acc_stderr": 0.026537045312145312,
"acc_norm": 0.22040816326530613,
"acc_norm_stderr": 0.026537045312145312
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.47257383966244726,
"acc_stderr": 0.03249822718301303,
"acc_norm": 0.47257383966244726,
"acc_norm_stderr": 0.03249822718301303
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2542372881355932,
"acc_stderr": 0.011121129007840676,
"acc_norm": 0.2542372881355932,
"acc_norm_stderr": 0.011121129007840676
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.032566854844603886,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.032566854844603886
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.40606060606060607,
"acc_stderr": 0.03834816355401181,
"acc_norm": 0.40606060606060607,
"acc_norm_stderr": 0.03834816355401181
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2582619339045288,
"mc1_stderr": 0.015321821688476189,
"mc2": 0.4874892521316813,
"mc2_stderr": 0.017011135502882097
},
"harness|ko_commongen_v2|2": {
"acc": 0.16646989374262103,
"acc_stderr": 0.01280687925641312,
"acc_norm": 0.2833530106257379,
"acc_norm_stderr": 0.015492852084597233
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "caisarl76/Mistral-7B-Openorca-cot-2157",
"model_sha": "eaf722c66f6bbb64f7f43d08bc9de3b36be29d2b",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}