config_general (dict) | results (dict) | versions (dict) | config_tasks (dict) | summary_tasks (dict) | summary_general (dict)
---|---|---|---|---|---
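A note for readers spot-checking the rows below: the `acc_stderr` values are consistent with the sample standard error of a proportion, sqrt(p·(1−p)/(n−1)), where n is the task's `non_truncated` example count from the `summary_tasks` column. A minimal sketch in plain Python (no lighteval dependency assumed):

```python
import math

def binomial_stderr(acc: float, n: int) -> float:
    # Sample standard error of a proportion with an (n - 1) denominator,
    # which is what the acc / acc_stderr pairs below appear to follow.
    return math.sqrt(acc * (1.0 - acc) / (n - 1))

# hendrycksTest-abstract_algebra: acc = 0.38 over 100 examples
print(binomial_stderr(0.38, 100))                 # ~0.0487832, matches acc_stderr
# hendrycksTest-anatomy: acc = 0.7111... over 135 examples
print(binomial_stderr(0.7111111111111111, 135))   # ~0.0391545, matches acc_stderr
```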
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 145877.357026936,
"end_time": 167502.554312204,
"total_evaluation_time_secondes": "21625.19728526802",
"model_name": "JaeyeonKang/CCK_Asura_v1",
"model_sha": "7dd3ddea090bd63f3143e70d7d6237cc40c046e4",
"model_dtype": "torch.float16",
"model_size": "129.73 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7013651877133106,
"acc_stderr": 0.013374078615068749,
"acc_norm": 0.7389078498293515,
"acc_norm_stderr": 0.012835523909473848
},
"harness|hellaswag|10": {
"acc": 0.719577773351922,
"acc_stderr": 0.004482874732237349,
"acc_norm": 0.8906592312288388,
"acc_norm_stderr": 0.003114285077228029
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7111111111111111,
"acc_stderr": 0.03915450630414251,
"acc_norm": 0.7111111111111111,
"acc_norm_stderr": 0.03915450630414251
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.8552631578947368,
"acc_stderr": 0.0286319518459304,
"acc_norm": 0.8552631578947368,
"acc_norm_stderr": 0.0286319518459304
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.8,
"acc_stderr": 0.04020151261036844,
"acc_norm": 0.8,
"acc_norm_stderr": 0.04020151261036844
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8075471698113208,
"acc_stderr": 0.024262979839372267,
"acc_norm": 0.8075471698113208,
"acc_norm_stderr": 0.024262979839372267
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8888888888888888,
"acc_stderr": 0.026280550932848087,
"acc_norm": 0.8888888888888888,
"acc_norm_stderr": 0.026280550932848087
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.57,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.57,
"acc_norm_stderr": 0.04975698519562428
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.43,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7514450867052023,
"acc_stderr": 0.03295304696818318,
"acc_norm": 0.7514450867052023,
"acc_norm_stderr": 0.03295304696818318
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4803921568627451,
"acc_stderr": 0.04971358884367406,
"acc_norm": 0.4803921568627451,
"acc_norm_stderr": 0.04971358884367406
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774708,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774708
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7404255319148936,
"acc_stderr": 0.02865917937429232,
"acc_norm": 0.7404255319148936,
"acc_norm_stderr": 0.02865917937429232
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5964912280701754,
"acc_stderr": 0.04615186962583707,
"acc_norm": 0.5964912280701754,
"acc_norm_stderr": 0.04615186962583707
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7241379310344828,
"acc_stderr": 0.03724563619774632,
"acc_norm": 0.7241379310344828,
"acc_norm_stderr": 0.03724563619774632
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.5343915343915344,
"acc_stderr": 0.02569032176249385,
"acc_norm": 0.5343915343915344,
"acc_norm_stderr": 0.02569032176249385
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5714285714285714,
"acc_stderr": 0.04426266681379909,
"acc_norm": 0.5714285714285714,
"acc_norm_stderr": 0.04426266681379909
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.864516129032258,
"acc_stderr": 0.019469334586486933,
"acc_norm": 0.864516129032258,
"acc_norm_stderr": 0.019469334586486933
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6206896551724138,
"acc_stderr": 0.034139638059062345,
"acc_norm": 0.6206896551724138,
"acc_norm_stderr": 0.034139638059062345
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.87,
"acc_stderr": 0.03379976689896309,
"acc_norm": 0.87,
"acc_norm_stderr": 0.03379976689896309
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8363636363636363,
"acc_stderr": 0.02888787239548795,
"acc_norm": 0.8363636363636363,
"acc_norm_stderr": 0.02888787239548795
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9040404040404041,
"acc_stderr": 0.020984808610047933,
"acc_norm": 0.9040404040404041,
"acc_norm_stderr": 0.020984808610047933
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9430051813471503,
"acc_stderr": 0.016731085293607558,
"acc_norm": 0.9430051813471503,
"acc_norm_stderr": 0.016731085293607558
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.7897435897435897,
"acc_stderr": 0.020660597485026945,
"acc_norm": 0.7897435897435897,
"acc_norm_stderr": 0.020660597485026945
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.42592592592592593,
"acc_stderr": 0.030149135601365944,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.030149135601365944
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.865546218487395,
"acc_stderr": 0.022159373072744442,
"acc_norm": 0.865546218487395,
"acc_norm_stderr": 0.022159373072744442
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5099337748344371,
"acc_stderr": 0.04081677107248436,
"acc_norm": 0.5099337748344371,
"acc_norm_stderr": 0.04081677107248436
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9192660550458716,
"acc_stderr": 0.011680172292862086,
"acc_norm": 0.9192660550458716,
"acc_norm_stderr": 0.011680172292862086
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6990740740740741,
"acc_stderr": 0.031280390843298804,
"acc_norm": 0.6990740740740741,
"acc_norm_stderr": 0.031280390843298804
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9313725490196079,
"acc_stderr": 0.017744453647073315,
"acc_norm": 0.9313725490196079,
"acc_norm_stderr": 0.017744453647073315
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9240506329113924,
"acc_stderr": 0.0172446332510657,
"acc_norm": 0.9240506329113924,
"acc_norm_stderr": 0.0172446332510657
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7982062780269058,
"acc_stderr": 0.02693611191280227,
"acc_norm": 0.7982062780269058,
"acc_norm_stderr": 0.02693611191280227
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8778625954198473,
"acc_stderr": 0.02871877688934232,
"acc_norm": 0.8778625954198473,
"acc_norm_stderr": 0.02871877688934232
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.9090909090909091,
"acc_stderr": 0.026243194054073878,
"acc_norm": 0.9090909090909091,
"acc_norm_stderr": 0.026243194054073878
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8796296296296297,
"acc_stderr": 0.0314570385430625,
"acc_norm": 0.8796296296296297,
"acc_norm_stderr": 0.0314570385430625
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8159509202453987,
"acc_stderr": 0.030446777687971723,
"acc_norm": 0.8159509202453987,
"acc_norm_stderr": 0.030446777687971723
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.6339285714285714,
"acc_stderr": 0.0457237235873743,
"acc_norm": 0.6339285714285714,
"acc_norm_stderr": 0.0457237235873743
},
"harness|hendrycksTest-management|5": {
"acc": 0.8737864077669902,
"acc_stderr": 0.03288180278808629,
"acc_norm": 0.8737864077669902,
"acc_norm_stderr": 0.03288180278808629
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9273504273504274,
"acc_stderr": 0.01700436856813237,
"acc_norm": 0.9273504273504274,
"acc_norm_stderr": 0.01700436856813237
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8914431673052363,
"acc_stderr": 0.011124283175851183,
"acc_norm": 0.8914431673052363,
"acc_norm_stderr": 0.011124283175851183
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8439306358381503,
"acc_stderr": 0.019539014685374036,
"acc_norm": 0.8439306358381503,
"acc_norm_stderr": 0.019539014685374036
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.6636871508379888,
"acc_stderr": 0.0158010037291459,
"acc_norm": 0.6636871508379888,
"acc_norm_stderr": 0.0158010037291459
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8300653594771242,
"acc_stderr": 0.02150538312123138,
"acc_norm": 0.8300653594771242,
"acc_norm_stderr": 0.02150538312123138
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.819935691318328,
"acc_stderr": 0.02182342285774494,
"acc_norm": 0.819935691318328,
"acc_norm_stderr": 0.02182342285774494
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8487654320987654,
"acc_stderr": 0.019935086092149886,
"acc_norm": 0.8487654320987654,
"acc_norm_stderr": 0.019935086092149886
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6134751773049646,
"acc_stderr": 0.02904919034254347,
"acc_norm": 0.6134751773049646,
"acc_norm_stderr": 0.02904919034254347
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.589960886571056,
"acc_stderr": 0.012561837621962032,
"acc_norm": 0.589960886571056,
"acc_norm_stderr": 0.012561837621962032
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8235294117647058,
"acc_stderr": 0.023157468308559345,
"acc_norm": 0.8235294117647058,
"acc_norm_stderr": 0.023157468308559345
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.8218954248366013,
"acc_stderr": 0.015478369653108568,
"acc_norm": 0.8218954248366013,
"acc_norm_stderr": 0.015478369653108568
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7181818181818181,
"acc_stderr": 0.04309118709946458,
"acc_norm": 0.7181818181818181,
"acc_norm_stderr": 0.04309118709946458
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8244897959183674,
"acc_stderr": 0.02435280072297001,
"acc_norm": 0.8244897959183674,
"acc_norm_stderr": 0.02435280072297001
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.9154228855721394,
"acc_stderr": 0.019675343217199173,
"acc_norm": 0.9154228855721394,
"acc_norm_stderr": 0.019675343217199173
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.95,
"acc_stderr": 0.021904291355759057,
"acc_norm": 0.95,
"acc_norm_stderr": 0.021904291355759057
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5783132530120482,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.5783132530120482,
"acc_norm_stderr": 0.03844453181770917
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8771929824561403,
"acc_stderr": 0.02517298435015577,
"acc_norm": 0.8771929824561403,
"acc_norm_stderr": 0.02517298435015577
},
"harness|truthfulqa:mc|0": {
"mc1": 0.565483476132191,
"mc1_stderr": 0.017352738749259564,
"mc2": 0.7174856574663107,
"mc2_stderr": 0.014605715133518151
},
"harness|winogrande|5": {
"acc": 0.8634569850039463,
"acc_stderr": 0.0096502429002916
},
"harness|gsm8k|5": {
"acc": 0.6808188021228203,
"acc_stderr": 0.012840345676251653
},
"all": {
"acc": 0.7535469467828841,
"acc_stderr": 0.028473742983492905,
"acc_norm": 0.7564527472308834,
"acc_norm_stderr": 0.029025433712812198,
"mc1": 0.565483476132191,
"mc1_stderr": 0.017352738749259564,
"mc2": 0.7174856574663107,
"mc2_stderr": 0.014605715133518151
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "ca48d52265c0051f",
"hash_cont_tokens": "e8abf848493b50f7"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4687,
"non_padded": 0,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "4975ded0ed31f702",
"hash_cont_tokens": "9fe0a5c42e1532db"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40019,
"non_padded": 149,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "8ff523ec326d5d55",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "742bd6a389a8ef40",
"hash_cont_tokens": "f11971a765cb609f"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "aa9743839c83bd9f",
"hash_cont_tokens": "440a970fadecdc7b"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "60f6ed52e2a2987a",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "6080d9f3c5930be0",
"hash_cont_tokens": "7ecd60c25b9bfe5b"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "873319724ad65589",
"hash_cont_tokens": "875cde3af7a0ee14"
},
"truncated": 0,
"non_truncated": 144,
"padded": 564,
"non_padded": 12,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "8366d04d12b154a7",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "1724a282fb269fd7",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "b7aa815781eae172",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "0003d13e86bc8c1a",
"hash_cont_tokens": "702fb6d82ff0d6ac"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "32b28762dd077c78",
"hash_cont_tokens": "f7b8097afc16a47c"
},
"truncated": 0,
"non_truncated": 102,
"padded": 404,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "19dd0e1895125d49",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "761c7ce187b3338a",
"hash_cont_tokens": "aa0e8bc655f2f641"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "dae74024ebc12b2b",
"hash_cont_tokens": "b1cc6e7e9fcd3827"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "5fa8050688a246ed",
"hash_cont_tokens": "2425a3f084a591ef"
},
"truncated": 0,
"non_truncated": 145,
"padded": 580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "2da3f8d7d1515cc6",
"hash_cont_tokens": "bd87bf0c060fd925"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "907de61bbe46dada",
"hash_cont_tokens": "eb8932890e0605db"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "d7549fe9ac133643",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "b449ae8cd622fb96",
"hash_cont_tokens": "1ddcb86d28cde266"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "a447bd1574b5e26c",
"hash_cont_tokens": "176c8dcff38c5f8f"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "56312a0c3d85ae90",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "5002f4ac8b1562ca",
"hash_cont_tokens": "674fc454bdc5ac93"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "b4f9efd054b0149d",
"hash_cont_tokens": "03a5012b916274ea"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "6e010d01707b5a01",
"hash_cont_tokens": "873d2aab226ba1d8"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "fc1f6e824ba386d7",
"hash_cont_tokens": "c583432ad27fcfe0"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "3a485a40c8432ece",
"hash_cont_tokens": "d7907b61bcb8c123"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "a7dd9ca4bbda3752",
"hash_cont_tokens": "f47f041de50333b9"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "d7ea631399a73865",
"hash_cont_tokens": "0d56317b3e5eedb5"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "d12816cf88146011",
"hash_cont_tokens": "09ba1243e7390c0f"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "9763ecaef4814c21",
"hash_cont_tokens": "9cc29889c3d3f77d"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "c639cce12a46ebad",
"hash_cont_tokens": "cdd0b3dc06d933e5"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "b9762065cce6f3a6",
"hash_cont_tokens": "e02816433ff28daf"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "84157fee0b6d0f3c",
"hash_cont_tokens": "142a4a8a1138a214"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "ade303e1ae3c016f",
"hash_cont_tokens": "bc54813e809b796d"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "e5482e1c23c23d35",
"hash_cont_tokens": "8ea8c5ff76a15bca"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "4415eeb9bad0507b",
"hash_cont_tokens": "e3a8cd951b6e3469"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "e6b5271422ecbaa8",
"hash_cont_tokens": "3e9e0bdc248fd88a"
},
"truncated": 0,
"non_truncated": 163,
"padded": 644,
"non_padded": 8,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "e719cb83196977d8",
"hash_cont_tokens": "55b12fb138c6a064"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "155da0e62b39e804",
"hash_cont_tokens": "a01d6d39a83c4597"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "38466c242259e6d3",
"hash_cont_tokens": "6aeaed4d823c98aa"
},
"truncated": 0,
"non_truncated": 234,
"padded": 932,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "0dd129e92538a7f6",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "d108a883fc3e022f",
"hash_cont_tokens": "9b0ab02a64603081"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "0e7b7df82884a2d5",
"hash_cont_tokens": "3b8bbe9108e55ce9"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1364,
"non_padded": 20,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "7c220f5613cd8426",
"hash_cont_tokens": "3e9bfc0362e97330"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "35de1609a9a763a9",
"hash_cont_tokens": "23b2dc6ee2da4cfc"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "a1dcfa9c80490d06",
"hash_cont_tokens": "9f6ff69d23a48783"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "a091cf645d2415e0",
"hash_cont_tokens": "d6458d743d875837"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "e9df32a33f85290c",
"hash_cont_tokens": "922a195f53a35662"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "c9f7583fff66d361",
"hash_cont_tokens": "2e590029ef41fbcd"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "40a933f829116f8d",
"hash_cont_tokens": "7cfee54dbddd5a98"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "0f6a92c3a2062b48",
"hash_cont_tokens": "a86677b2a45c20e1"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "29a08e9bfbe9b2f0",
"hash_cont_tokens": "0d756ccaae031757"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "32a03f1f22a6e103",
"hash_cont_tokens": "b2229bc2cfbf594b"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "1de5c52d2b2831d7",
"hash_cont_tokens": "c3a3bdfd177eed5b"
},
"truncated": 0,
"non_truncated": 201,
"padded": 800,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "add924961f7f4146",
"hash_cont_tokens": "50421e30bef398f9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "e0653601c466b1bc",
"hash_cont_tokens": "af8b3658088cb37f"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "ac600d612445156d",
"hash_cont_tokens": "060118bef6de4e0a"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "a03ce28b7fd06aa7",
"hash_cont_tokens": "f5da56a132aab151"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "72067255e368e24e",
"hash_cont_tokens": "f08975ad6f2d5864"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "bda342e47b5099b2",
"hash_cont_tokens": "696e9690b146bd30"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "a8fa53915153e1db",
"hash_cont_tokens": "62b683fb5cadf0a1"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113348,
"non_padded": 1524,
"num_truncated_few_shots": 0
} |
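Between the two model rows, a short sketch of how the `all` block can be reproduced from the per-task entries. The assumption (not stated in the dump itself) is that each metric in `all` is the unweighted mean of that metric over every task that reports it; this is at least consistent with `mc1`/`mc2` in `all` matching the lone truthfulqa entry exactly.

```python
from collections import defaultdict

def aggregate_all(results: dict) -> dict:
    # Assumption: "all" is the unweighted mean of each metric key over
    # every task that reports that key. Tasks lacking a key (e.g. only
    # truthfulqa has mc1/mc2) simply don't contribute to its mean.
    sums, counts = defaultdict(float), defaultdict(int)
    for task, metrics in results.items():
        if task == "all":
            continue
        for key, value in metrics.items():
            sums[key] += value
            counts[key] += 1
    return {key: sums[key] / counts[key] for key in sums}
```

If the assumption holds, feeding a row's `results` dict into `aggregate_all` should reproduce its `all` block (e.g. acc ≈ 0.75355 for the first row above).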
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 3973041.324489038,
"end_time": 4009208.078815405,
"total_evaluation_time_secondes": "36166.75432636682",
"model_name": "abacusai/Smaug-72B-v0.1",
"model_sha": "54a8c35600ec5cb30ca2129247854ece23e57f57",
"model_dtype": "torch.bfloat16",
"model_size": "135.9 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.735494880546075,
"acc_stderr": 0.012889272949313371,
"acc_norm": 0.7602389078498294,
"acc_norm_stderr": 0.012476304127453944
},
"harness|hellaswag|10": {
"acc": 0.7199761003784106,
"acc_stderr": 0.004480929450281562,
"acc_norm": 0.8926508663612827,
"acc_norm_stderr": 0.0030892396746331585
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7185185185185186,
"acc_stderr": 0.038850042458002526,
"acc_norm": 0.7185185185185186,
"acc_norm_stderr": 0.038850042458002526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.881578947368421,
"acc_stderr": 0.026293995855474928,
"acc_norm": 0.881578947368421,
"acc_norm_stderr": 0.026293995855474928
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.82,
"acc_stderr": 0.038612291966536955,
"acc_norm": 0.82,
"acc_norm_stderr": 0.038612291966536955
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8452830188679246,
"acc_stderr": 0.022257075558791282,
"acc_norm": 0.8452830188679246,
"acc_norm_stderr": 0.022257075558791282
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.9305555555555556,
"acc_stderr": 0.021257974822832048,
"acc_norm": 0.9305555555555556,
"acc_norm_stderr": 0.021257974822832048
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145633
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.55,
"acc_stderr": 0.049999999999999996,
"acc_norm": 0.55,
"acc_norm_stderr": 0.049999999999999996
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7456647398843931,
"acc_stderr": 0.0332055644308557,
"acc_norm": 0.7456647398843931,
"acc_norm_stderr": 0.0332055644308557
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5686274509803921,
"acc_stderr": 0.04928099597287534,
"acc_norm": 0.5686274509803921,
"acc_norm_stderr": 0.04928099597287534
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.81,
"acc_stderr": 0.03942772444036622,
"acc_norm": 0.81,
"acc_norm_stderr": 0.03942772444036622
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7914893617021277,
"acc_stderr": 0.026556982117838728,
"acc_norm": 0.7914893617021277,
"acc_norm_stderr": 0.026556982117838728
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.6140350877192983,
"acc_stderr": 0.04579639422070434,
"acc_norm": 0.6140350877192983,
"acc_norm_stderr": 0.04579639422070434
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7724137931034483,
"acc_stderr": 0.03493950380131184,
"acc_norm": 0.7724137931034483,
"acc_norm_stderr": 0.03493950380131184
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.6904761904761905,
"acc_stderr": 0.023809523809523864,
"acc_norm": 0.6904761904761905,
"acc_norm_stderr": 0.023809523809523864
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5714285714285714,
"acc_stderr": 0.04426266681379909,
"acc_norm": 0.5714285714285714,
"acc_norm_stderr": 0.04426266681379909
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8838709677419355,
"acc_stderr": 0.018225757949432306,
"acc_norm": 0.8838709677419355,
"acc_norm_stderr": 0.018225757949432306
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6600985221674877,
"acc_stderr": 0.033327690684107895,
"acc_norm": 0.6600985221674877,
"acc_norm_stderr": 0.033327690684107895
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.82,
"acc_stderr": 0.038612291966536934,
"acc_norm": 0.82,
"acc_norm_stderr": 0.038612291966536934
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8545454545454545,
"acc_stderr": 0.027530196355066584,
"acc_norm": 0.8545454545454545,
"acc_norm_stderr": 0.027530196355066584
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9393939393939394,
"acc_stderr": 0.016999994927421592,
"acc_norm": 0.9393939393939394,
"acc_norm_stderr": 0.016999994927421592
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9844559585492227,
"acc_stderr": 0.008927492715084315,
"acc_norm": 0.9844559585492227,
"acc_norm_stderr": 0.008927492715084315
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8076923076923077,
"acc_stderr": 0.019982347208637282,
"acc_norm": 0.8076923076923077,
"acc_norm_stderr": 0.019982347208637282
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.4703703703703704,
"acc_stderr": 0.030431963547936584,
"acc_norm": 0.4703703703703704,
"acc_norm_stderr": 0.030431963547936584
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8445378151260504,
"acc_stderr": 0.023536818625398904,
"acc_norm": 0.8445378151260504,
"acc_norm_stderr": 0.023536818625398904
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5629139072847682,
"acc_stderr": 0.040500357222306355,
"acc_norm": 0.5629139072847682,
"acc_norm_stderr": 0.040500357222306355
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9357798165137615,
"acc_stderr": 0.010510494713201403,
"acc_norm": 0.9357798165137615,
"acc_norm_stderr": 0.010510494713201403
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6805555555555556,
"acc_stderr": 0.03179876342176853,
"acc_norm": 0.6805555555555556,
"acc_norm_stderr": 0.03179876342176853
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9117647058823529,
"acc_stderr": 0.019907399791316945,
"acc_norm": 0.9117647058823529,
"acc_norm_stderr": 0.019907399791316945
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9113924050632911,
"acc_stderr": 0.018498315206865384,
"acc_norm": 0.9113924050632911,
"acc_norm_stderr": 0.018498315206865384
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7982062780269058,
"acc_stderr": 0.02693611191280227,
"acc_norm": 0.7982062780269058,
"acc_norm_stderr": 0.02693611191280227
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8931297709923665,
"acc_stderr": 0.027096548624883733,
"acc_norm": 0.8931297709923665,
"acc_norm_stderr": 0.027096548624883733
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8925619834710744,
"acc_stderr": 0.028268812192540616,
"acc_norm": 0.8925619834710744,
"acc_norm_stderr": 0.028268812192540616
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8611111111111112,
"acc_stderr": 0.033432700628696195,
"acc_norm": 0.8611111111111112,
"acc_norm_stderr": 0.033432700628696195
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8343558282208589,
"acc_stderr": 0.029208296231259104,
"acc_norm": 0.8343558282208589,
"acc_norm_stderr": 0.029208296231259104
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.6160714285714286,
"acc_stderr": 0.04616143075028546,
"acc_norm": 0.6160714285714286,
"acc_norm_stderr": 0.04616143075028546
},
"harness|hendrycksTest-management|5": {
"acc": 0.8543689320388349,
"acc_stderr": 0.0349260647662379,
"acc_norm": 0.8543689320388349,
"acc_norm_stderr": 0.0349260647662379
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9401709401709402,
"acc_stderr": 0.015537514263253874,
"acc_norm": 0.9401709401709402,
"acc_norm_stderr": 0.015537514263253874
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.86,
"acc_stderr": 0.034873508801977725,
"acc_norm": 0.86,
"acc_norm_stderr": 0.034873508801977725
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.9169859514687101,
"acc_stderr": 0.009866287394639536,
"acc_norm": 0.9169859514687101,
"acc_norm_stderr": 0.009866287394639536
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8410404624277457,
"acc_stderr": 0.019685307033571946,
"acc_norm": 0.8410404624277457,
"acc_norm_stderr": 0.019685307033571946
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.6960893854748603,
"acc_stderr": 0.01538284558758452,
"acc_norm": 0.6960893854748603,
"acc_norm_stderr": 0.01538284558758452
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8496732026143791,
"acc_stderr": 0.02046417512433263,
"acc_norm": 0.8496732026143791,
"acc_norm_stderr": 0.02046417512433263
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.842443729903537,
"acc_stderr": 0.020692237273583984,
"acc_norm": 0.842443729903537,
"acc_norm_stderr": 0.020692237273583984
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8641975308641975,
"acc_stderr": 0.019061588181505405,
"acc_norm": 0.8641975308641975,
"acc_norm_stderr": 0.019061588181505405
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6560283687943262,
"acc_stderr": 0.02833801742861133,
"acc_norm": 0.6560283687943262,
"acc_norm_stderr": 0.02833801742861133
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.6023468057366362,
"acc_stderr": 0.012499840347460642,
"acc_norm": 0.6023468057366362,
"acc_norm_stderr": 0.012499840347460642
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8345588235294118,
"acc_stderr": 0.02257177102549473,
"acc_norm": 0.8345588235294118,
"acc_norm_stderr": 0.02257177102549473
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.815359477124183,
"acc_stderr": 0.015697029240757773,
"acc_norm": 0.815359477124183,
"acc_norm_stderr": 0.015697029240757773
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7454545454545455,
"acc_stderr": 0.04172343038705383,
"acc_norm": 0.7454545454545455,
"acc_norm_stderr": 0.04172343038705383
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8163265306122449,
"acc_stderr": 0.024789071332007646,
"acc_norm": 0.8163265306122449,
"acc_norm_stderr": 0.024789071332007646
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.900497512437811,
"acc_stderr": 0.021166216304659397,
"acc_norm": 0.900497512437811,
"acc_norm_stderr": 0.021166216304659397
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.93,
"acc_stderr": 0.0256432399976243,
"acc_norm": 0.93,
"acc_norm_stderr": 0.0256432399976243
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5783132530120482,
"acc_stderr": 0.038444531817709175,
"acc_norm": 0.5783132530120482,
"acc_norm_stderr": 0.038444531817709175
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8713450292397661,
"acc_stderr": 0.025679342723276894,
"acc_norm": 0.8713450292397661,
"acc_norm_stderr": 0.025679342723276894
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6560587515299877,
"mc1_stderr": 0.016629087514276785,
"mc2": 0.7666613083747418,
"mc2_stderr": 0.014124410528709273
},
"harness|winogrande|5": {
"acc": 0.850828729281768,
"acc_stderr": 0.010012598805627305
},
"harness|gsm8k|5": {
"acc": 0.7869598180439727,
"acc_stderr": 0.01127844785690078
},
"all": {
"acc": 0.7716613011645818,
"acc_stderr": 0.02801089457302993,
"acc_norm": 0.7734062646949216,
"acc_norm_stderr": 0.028568963791437117,
"mc1": 0.6560587515299877,
"mc1_stderr": 0.016629087514276785,
"mc2": 0.7666613083747418,
"mc2_stderr": 0.014124410528709273
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "a86de36cca2a19b9",
"hash_cont_tokens": "402adfa0ed1abfe3"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4687,
"non_padded": 0,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "9e46720a9638c8a4",
"hash_cont_tokens": "5856e609c5b49c4f"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40068,
"non_padded": 100,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "00dc12ab60f18dd3",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "5b71f0137904b4fd",
"hash_cont_tokens": "f9dae0f98ef7c0f2"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "eef2a8a18c3925c0",
"hash_cont_tokens": "dff84e206d2f1e0d"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "c7c3799588097fc1",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "0da466ef69c2c211",
"hash_cont_tokens": "b81dd170f83789d1"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "ea7865285fa63718",
"hash_cont_tokens": "85c3400292af3bb8"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "551968a6bc1e1c69",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "12804011678b362d",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "4bd091031fc263d9",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "7532f5d07c6debfd",
"hash_cont_tokens": "e5cb48f872b79ee7"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "091beb94392a1731",
"hash_cont_tokens": "40862171591ad909"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "d09375fff8e916d5",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "ac72ede0b36aabf2",
"hash_cont_tokens": "36bb2a47e8ff1bd8"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "0a3072da09eaf315",
"hash_cont_tokens": "433685e9aa542c2d"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "45dcd2a8820fad20",
"hash_cont_tokens": "f086b291b3aa0628"
},
"truncated": 0,
"non_truncated": 145,
"padded": 576,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "73321a8a08f43d2f",
"hash_cont_tokens": "4f402da407619e4d"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "ef395842ce6008ce",
"hash_cont_tokens": "80d8e3e54d900608"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "f98b91cdb7b86749",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "94f2ac3fa39ac4c7",
"hash_cont_tokens": "e07819899bd63630"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "1036da676d11ad62",
"hash_cont_tokens": "eb6259a94d61e372"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "f40b47b509c459ae",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "ca2a0a3cdee71062",
"hash_cont_tokens": "c3336566c025bc59"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "d2a95c354bd5bce3",
"hash_cont_tokens": "999a32d098465441"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "477712b69094d77b",
"hash_cont_tokens": "361410848e01f8ed"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "ea00f00108f471d1",
"hash_cont_tokens": "18f9ae57b2444806"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "6771092a57f1064b",
"hash_cont_tokens": "a13496e646060699"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "6434ce770cc3a07d",
"hash_cont_tokens": "791a7a25f0571e59"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "9b84202a0e20279e",
"hash_cont_tokens": "9677b0687811cf73"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "cbd1c4c25d9a95e1",
"hash_cont_tokens": "6393201d9136920e"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "a9d9974081f33401",
"hash_cont_tokens": "17caccbb3a38c7bf"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "4ea19e6b2da621ca",
"hash_cont_tokens": "7128e2eeb930d3b3"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "93e06ed8cb44fcb2",
"hash_cont_tokens": "48e22ae63ee54721"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "10b1be4021766536",
"hash_cont_tokens": "0f40704815d5b3f6"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "6e511aceb2a5cc1f",
"hash_cont_tokens": "a9fdf5917bdddc9b"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "f2250000a60c4675",
"hash_cont_tokens": "c63e45a81fbe97b2"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "1d37f2053687bf09",
"hash_cont_tokens": "9df89edb95ea3c08"
},
"truncated": 0,
"non_truncated": 108,
"padded": 428,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "50a65f6db7781df6",
"hash_cont_tokens": "5b4f21454680a984"
},
"truncated": 0,
"non_truncated": 163,
"padded": 652,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "0214f9e954e7fcf7",
"hash_cont_tokens": "0c2fc7f9e9101fbb"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "3fc286ea903dc9e1",
"hash_cont_tokens": "1279a23b3bc7b32c"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "dfa2c9b7866c93e6",
"hash_cont_tokens": "be76778b3b861344"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "642d259108067cec",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "1f55d640e75559b3",
"hash_cont_tokens": "c61a0f86b50f0556"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "11599cd92aca75c2",
"hash_cont_tokens": "a208a34c74088f6c"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "9965b932ec67e2ff",
"hash_cont_tokens": "996ce7a5b6c4aef1"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "1da6449a92c60335",
"hash_cont_tokens": "9d4280b06a73f2ad"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "eacd0118cde3a6b6",
"hash_cont_tokens": "9a708d21688a0b16"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "900a2e857049c7fb",
"hash_cont_tokens": "ed0ff6b6c4caf978"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "2368119814fe27da",
"hash_cont_tokens": "4fd1a023ef90b43a"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1120,
"non_padded": 8,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "ba0b150921d1354f",
"hash_cont_tokens": "d2c1c75d7c0e6ec5"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "b80d6f9095fb702f",
"hash_cont_tokens": "ff4c3ef8a56efe40"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "10151b922fe9fdba",
"hash_cont_tokens": "b4566ef91a66db7d"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "ed0c12fa575d30f6",
"hash_cont_tokens": "b713ae56c89df822"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "49436381f9054ab9",
"hash_cont_tokens": "89baef8c4b642ed0"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "9aff724e413681b7",
"hash_cont_tokens": "b92ed9d8dde61395"
},
"truncated": 0,
"non_truncated": 201,
"padded": 784,
"non_padded": 20,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "9103b692a946fc09",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "6dc6ade73ee63cae",
"hash_cont_tokens": "1c1bf88d7c979ef5"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "92ed8eba1ceb58b4",
"hash_cont_tokens": "9fbfaba067301be2"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "405dc01724068f4f",
"hash_cont_tokens": "2aa05ab785b97e1d"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "e512c1d089d1c425",
"hash_cont_tokens": "e5da1ddee7e80213"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "3b8275f3fce8067b",
"hash_cont_tokens": "0d206a3f326288d8"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "44ee6f861f86ab7d",
"hash_cont_tokens": "63510439853388bf"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113413,
"non_padded": 1459,
"num_truncated_few_shots": 0
} |
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 1009823.331966069,
"end_time": 1033923.596197144,
"total_evaluation_time_secondes": "24100.264231075067",
"model_name": "cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_DPO_f16",
"model_sha": "cd29cfa124072c96ba8601230bead65d76e04dcb",
"model_dtype": "torch.bfloat16",
"model_size": "119.0 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7218430034129693,
"acc_stderr": 0.0130944699195388,
"acc_norm": 0.7406143344709898,
"acc_norm_stderr": 0.012808273573927094
},
"harness|hellaswag|10": {
"acc": 0.6701852220673172,
"acc_stderr": 0.004691848665399069,
"acc_norm": 0.8673571001792472,
"acc_norm_stderr": 0.003384951803213475
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7407407407407407,
"acc_stderr": 0.03785714465066653,
"acc_norm": 0.7407407407407407,
"acc_norm_stderr": 0.03785714465066653
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.875,
"acc_stderr": 0.026913523521537846,
"acc_norm": 0.875,
"acc_norm_stderr": 0.026913523521537846
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.76,
"acc_norm_stderr": 0.04292346959909283
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8075471698113208,
"acc_stderr": 0.024262979839372274,
"acc_norm": 0.8075471698113208,
"acc_norm_stderr": 0.024262979839372274
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8958333333333334,
"acc_stderr": 0.025545239210256917,
"acc_norm": 0.8958333333333334,
"acc_norm_stderr": 0.025545239210256917
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.44,
"acc_stderr": 0.0498887651569859,
"acc_norm": 0.44,
"acc_norm_stderr": 0.0498887651569859
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7167630057803468,
"acc_stderr": 0.034355680560478746,
"acc_norm": 0.7167630057803468,
"acc_norm_stderr": 0.034355680560478746
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5196078431372549,
"acc_stderr": 0.04971358884367406,
"acc_norm": 0.5196078431372549,
"acc_norm_stderr": 0.04971358884367406
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.79,
"acc_stderr": 0.04093601807403326,
"acc_norm": 0.79,
"acc_norm_stderr": 0.04093601807403326
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7617021276595745,
"acc_stderr": 0.02785125297388977,
"acc_norm": 0.7617021276595745,
"acc_norm_stderr": 0.02785125297388977
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5964912280701754,
"acc_stderr": 0.04615186962583707,
"acc_norm": 0.5964912280701754,
"acc_norm_stderr": 0.04615186962583707
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7517241379310344,
"acc_stderr": 0.036001056927277696,
"acc_norm": 0.7517241379310344,
"acc_norm_stderr": 0.036001056927277696
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.7433862433862434,
"acc_stderr": 0.022494510767503154,
"acc_norm": 0.7433862433862434,
"acc_norm_stderr": 0.022494510767503154
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.04444444444444449,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04444444444444449
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.9064516129032258,
"acc_stderr": 0.016565754668270982,
"acc_norm": 0.9064516129032258,
"acc_norm_stderr": 0.016565754668270982
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6650246305418719,
"acc_stderr": 0.033208527423483104,
"acc_norm": 0.6650246305418719,
"acc_norm_stderr": 0.033208527423483104
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.77,
"acc_stderr": 0.042295258468165044,
"acc_norm": 0.77,
"acc_norm_stderr": 0.042295258468165044
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8848484848484849,
"acc_stderr": 0.024925699798115344,
"acc_norm": 0.8848484848484849,
"acc_norm_stderr": 0.024925699798115344
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9292929292929293,
"acc_stderr": 0.01826310542019949,
"acc_norm": 0.9292929292929293,
"acc_norm_stderr": 0.01826310542019949
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9740932642487047,
"acc_stderr": 0.011464523356953162,
"acc_norm": 0.9740932642487047,
"acc_norm_stderr": 0.011464523356953162
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8102564102564103,
"acc_stderr": 0.019880165406588796,
"acc_norm": 0.8102564102564103,
"acc_norm_stderr": 0.019880165406588796
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.030296771286067323,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.030296771286067323
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8361344537815126,
"acc_stderr": 0.024044054940440488,
"acc_norm": 0.8361344537815126,
"acc_norm_stderr": 0.024044054940440488
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5231788079470199,
"acc_stderr": 0.04078093859163085,
"acc_norm": 0.5231788079470199,
"acc_norm_stderr": 0.04078093859163085
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9229357798165138,
"acc_stderr": 0.011434381698911096,
"acc_norm": 0.9229357798165138,
"acc_norm_stderr": 0.011434381698911096
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6712962962962963,
"acc_stderr": 0.032036140846700596,
"acc_norm": 0.6712962962962963,
"acc_norm_stderr": 0.032036140846700596
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9264705882352942,
"acc_stderr": 0.018318855850089678,
"acc_norm": 0.9264705882352942,
"acc_norm_stderr": 0.018318855850089678
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9113924050632911,
"acc_stderr": 0.018498315206865384,
"acc_norm": 0.9113924050632911,
"acc_norm_stderr": 0.018498315206865384
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.8026905829596412,
"acc_stderr": 0.02670985334496796,
"acc_norm": 0.8026905829596412,
"acc_norm_stderr": 0.02670985334496796
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8702290076335878,
"acc_stderr": 0.029473649496907065,
"acc_norm": 0.8702290076335878,
"acc_norm_stderr": 0.029473649496907065
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8842975206611571,
"acc_stderr": 0.02919980245562281,
"acc_norm": 0.8842975206611571,
"acc_norm_stderr": 0.02919980245562281
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8981481481481481,
"acc_stderr": 0.02923927267563275,
"acc_norm": 0.8981481481481481,
"acc_norm_stderr": 0.02923927267563275
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8773006134969326,
"acc_stderr": 0.025777328426978927,
"acc_norm": 0.8773006134969326,
"acc_norm_stderr": 0.025777328426978927
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5446428571428571,
"acc_stderr": 0.04726835553719098,
"acc_norm": 0.5446428571428571,
"acc_norm_stderr": 0.04726835553719098
},
"harness|hendrycksTest-management|5": {
"acc": 0.8640776699029126,
"acc_stderr": 0.0339329572976101,
"acc_norm": 0.8640776699029126,
"acc_norm_stderr": 0.0339329572976101
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9444444444444444,
"acc_stderr": 0.01500631280644693,
"acc_norm": 0.9444444444444444,
"acc_norm_stderr": 0.01500631280644693
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.86,
"acc_stderr": 0.0348735088019777,
"acc_norm": 0.86,
"acc_norm_stderr": 0.0348735088019777
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.913154533844189,
"acc_stderr": 0.01007029837774778,
"acc_norm": 0.913154533844189,
"acc_norm_stderr": 0.01007029837774778
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8294797687861272,
"acc_stderr": 0.020247961569303728,
"acc_norm": 0.8294797687861272,
"acc_norm_stderr": 0.020247961569303728
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.8,
"acc_stderr": 0.013378001241813072,
"acc_norm": 0.8,
"acc_norm_stderr": 0.013378001241813072
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8562091503267973,
"acc_stderr": 0.02009118893604371,
"acc_norm": 0.8562091503267973,
"acc_norm_stderr": 0.02009118893604371
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7942122186495176,
"acc_stderr": 0.022961339906764248,
"acc_norm": 0.7942122186495176,
"acc_norm_stderr": 0.022961339906764248
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8672839506172839,
"acc_stderr": 0.01887735383957184,
"acc_norm": 0.8672839506172839,
"acc_norm_stderr": 0.01887735383957184
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6418439716312057,
"acc_stderr": 0.028602085862759422,
"acc_norm": 0.6418439716312057,
"acc_norm_stderr": 0.028602085862759422
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.590612777053455,
"acc_stderr": 0.012558780895570757,
"acc_norm": 0.590612777053455,
"acc_norm_stderr": 0.012558780895570757
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8382352941176471,
"acc_stderr": 0.022368672562886747,
"acc_norm": 0.8382352941176471,
"acc_norm_stderr": 0.022368672562886747
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.8137254901960784,
"acc_stderr": 0.01575052628436335,
"acc_norm": 0.8137254901960784,
"acc_norm_stderr": 0.01575052628436335
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7181818181818181,
"acc_stderr": 0.043091187099464585,
"acc_norm": 0.7181818181818181,
"acc_norm_stderr": 0.043091187099464585
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8530612244897959,
"acc_stderr": 0.02266540041721764,
"acc_norm": 0.8530612244897959,
"acc_norm_stderr": 0.02266540041721764
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.9054726368159204,
"acc_stderr": 0.020687186951534094,
"acc_norm": 0.9054726368159204,
"acc_norm_stderr": 0.020687186951534094
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.92,
"acc_stderr": 0.0272659924344291,
"acc_norm": 0.92,
"acc_norm_stderr": 0.0272659924344291
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5843373493975904,
"acc_stderr": 0.03836722176598053,
"acc_norm": 0.5843373493975904,
"acc_norm_stderr": 0.03836722176598053
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8771929824561403,
"acc_stderr": 0.02517298435015577,
"acc_norm": 0.8771929824561403,
"acc_norm_stderr": 0.02517298435015577
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5556915544675642,
"mc1_stderr": 0.017394586250743176,
"mc2": 0.7224126373641326,
"mc2_stderr": 0.014009811551091062
},
"harness|winogrande|5": {
"acc": 0.8334648776637726,
"acc_stderr": 0.010470796496781098
},
"harness|gsm8k|5": {
"acc": 0.7445034116755117,
"acc_stderr": 0.012013462405460067
},
"all": {
"acc": 0.764901672440236,
"acc_stderr": 0.02826230862515645,
"acc_norm": 0.7677453718421197,
"acc_norm_stderr": 0.02881226227160178,
"mc1": 0.5556915544675642,
"mc1_stderr": 0.017394586250743176,
"mc2": 0.7224126373641326,
"mc2_stderr": 0.014009811551091062
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "f52f7134dd4e8235",
"hash_cont_tokens": "e23c779c4c2dd1ec"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4682,
"non_padded": 5,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "8380af90422a117e",
"hash_cont_tokens": "55da5ba61989a8fe"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40097,
"non_padded": 71,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "9185dc38dcc328ea",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "90fdbbaaf0213cec",
"hash_cont_tokens": "5cc800feae9fa1ad"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "cbe1c711494076b6",
"hash_cont_tokens": "655dbb90034f484a"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "09397035a4a73e5f",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "90c311de52544438",
"hash_cont_tokens": "f77b74d946d7fc02"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "d8fd4e3af4ae46c3",
"hash_cont_tokens": "1ba4b1a158d8bf3f"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "da514a10083e8e97",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "7ccea65975bb46d4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "8ea8585f6adc2650",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "9d07c6e852253252",
"hash_cont_tokens": "78a0ebf66d91c5cf"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "0d3d540477f9eddb",
"hash_cont_tokens": "5a030c95824fdbe5"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "5ebc754afaa1fac8",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "7780b9cde8badacb",
"hash_cont_tokens": "2326dc60d0bc41b6"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "8acec1576892f7ab",
"hash_cont_tokens": "be908364b6f14dd6"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "e0321889f63f18d7",
"hash_cont_tokens": "179280ef597fe1bf"
},
"truncated": 0,
"non_truncated": 145,
"padded": 564,
"non_padded": 16,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "60e497887b9e2608",
"hash_cont_tokens": "95cdcdaf1abd0bd2"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "53adc0607e358206",
"hash_cont_tokens": "6a4818f3c307c346"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "34682f752c1a1ac4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "bb5cc287970e5c14",
"hash_cont_tokens": "36d0d84455f0bdba"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "b12197fdbc9a45f0",
"hash_cont_tokens": "c678f794a9b8ee74"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "36408b638d9d7a8d",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "652bd20e505a2826",
"hash_cont_tokens": "e9c94304326d875c"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "8f4cd01faf05c6f1",
"hash_cont_tokens": "f937a1349eb483eb"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "217861435fcb5576",
"hash_cont_tokens": "8b27dd3907d25b4e"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "bcedb3cf953f812f",
"hash_cont_tokens": "3763cae29e2f938c"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "52affce916d66c97",
"hash_cont_tokens": "fd7b555352d765a4"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "b9d29201856d353d",
"hash_cont_tokens": "61f46d4a209b9aa2"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "9c27af329cb41097",
"hash_cont_tokens": "4e7053e7c19d680d"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "192aef17a8956826",
"hash_cont_tokens": "84d19ae8790476bb"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "a9bc6c02c6f83983",
"hash_cont_tokens": "b119c7b668213a4e"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "14741fa2bd2a4414",
"hash_cont_tokens": "a3b126bc622d571f"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "67f306eb2bf3d2cb",
"hash_cont_tokens": "9abf19ceb76331ff"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "e5cc30c46358588f",
"hash_cont_tokens": "0e2e725ae9a898da"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "10a6536adeac8632",
"hash_cont_tokens": "a94c1dea6d775249"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "d9015aba41ce0d5c",
"hash_cont_tokens": "3832f860859bb86b"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "d5f2109de63c3402",
"hash_cont_tokens": "9fac5a0c364fca8a"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "e0b39eb7c9788cfe",
"hash_cont_tokens": "dc53ed31134ddf3a"
},
"truncated": 0,
"non_truncated": 163,
"padded": 644,
"non_padded": 8,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "643a872ad0f99bb0",
"hash_cont_tokens": "e272b5456d5552d6"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "1232c5b0f524b151",
"hash_cont_tokens": "7119d4642957b1f0"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "f1d76d4a1e08e901",
"hash_cont_tokens": "099d58c66ece3f11"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "cd181ff20fe83b83",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "a3d90d10e2efc569",
"hash_cont_tokens": "bae342d4e82ba8f7"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "4b35576715cc147a",
"hash_cont_tokens": "578c64cbdbb1e0d4"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "1b93703ae85294ee",
"hash_cont_tokens": "79b25f42b3fce0f9"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "6741a26253bd4258",
"hash_cont_tokens": "9d1f3b976417156c"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "730a52e273f8fcf5",
"hash_cont_tokens": "88dab560e1e06d97"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "9e211e939e14b414",
"hash_cont_tokens": "04ea847139fe9393"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "d5761e6be99ed835",
"hash_cont_tokens": "0435ff692ad17e68"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1124,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "fcbc59834dbaa06c",
"hash_cont_tokens": "b852c74e9f8801bd"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "ba5999ee85a41b08",
"hash_cont_tokens": "5db0f6460652d063"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "35652463c3b2d9c6",
"hash_cont_tokens": "c960676ef7f3dbe5"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "af501bc2c58d000f",
"hash_cont_tokens": "3320565f412c4b01"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "5df7af45226ffc3a",
"hash_cont_tokens": "218ed775ef60aab9"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "5dc2e3734f4dd402",
"hash_cont_tokens": "20babf5cc4cc7f3d"
},
"truncated": 0,
"non_truncated": 201,
"padded": 804,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "ed972b660c40d1e4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "ed703c55cc114c98",
"hash_cont_tokens": "dc6d57296bea0882"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "00cf9f5943b1480b",
"hash_cont_tokens": "37f53444db289ed3"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "5e931dfc6ab75011",
"hash_cont_tokens": "71a67034827cd30e"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "bd055e8ba456ab4a",
"hash_cont_tokens": "c93e9c22fa3077a0"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "5cae6c4034435931",
"hash_cont_tokens": "261f54d6603ee2bd"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "2f7ca631fba4ce39",
"hash_cont_tokens": "e621b4a7c3fa87a7"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113445,
"non_padded": 1427,
"num_truncated_few_shots": 0
} |
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 251530.295188387,
"end_time": 275402.863791093,
"total_evaluation_time_secondes": "23872.568602705986",
"model_name": "cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_full_linear_DPO",
"model_sha": "e8e558b5fd4ac9da839577b1295d10ca75fc2663",
"model_dtype": "torch.float16",
"model_size": "119.0 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7167235494880546,
"acc_stderr": 0.013167478735134575,
"acc_norm": 0.7406143344709898,
"acc_norm_stderr": 0.012808273573927097
},
"harness|hellaswag|10": {
"acc": 0.6703843855805617,
"acc_stderr": 0.004691128722535485,
"acc_norm": 0.8666600278828919,
"acc_norm_stderr": 0.003392470498816845
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7555555555555555,
"acc_stderr": 0.03712537833614866,
"acc_norm": 0.7555555555555555,
"acc_norm_stderr": 0.03712537833614866
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.875,
"acc_stderr": 0.026913523521537846,
"acc_norm": 0.875,
"acc_norm_stderr": 0.026913523521537846
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.77,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.77,
"acc_norm_stderr": 0.04229525846816505
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8037735849056604,
"acc_stderr": 0.024442388131100813,
"acc_norm": 0.8037735849056604,
"acc_norm_stderr": 0.024442388131100813
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.9027777777777778,
"acc_stderr": 0.024774516250440182,
"acc_norm": 0.9027777777777778,
"acc_norm_stderr": 0.024774516250440182
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.61,
"acc_stderr": 0.049020713000019756,
"acc_norm": 0.61,
"acc_norm_stderr": 0.049020713000019756
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7109826589595376,
"acc_stderr": 0.034564257450869995,
"acc_norm": 0.7109826589595376,
"acc_norm_stderr": 0.034564257450869995
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5196078431372549,
"acc_stderr": 0.04971358884367406,
"acc_norm": 0.5196078431372549,
"acc_norm_stderr": 0.04971358884367406
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.79,
"acc_stderr": 0.04093601807403326,
"acc_norm": 0.79,
"acc_norm_stderr": 0.04093601807403326
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7574468085106383,
"acc_stderr": 0.028020226271200217,
"acc_norm": 0.7574468085106383,
"acc_norm_stderr": 0.028020226271200217
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5964912280701754,
"acc_stderr": 0.04615186962583707,
"acc_norm": 0.5964912280701754,
"acc_norm_stderr": 0.04615186962583707
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7517241379310344,
"acc_stderr": 0.036001056927277696,
"acc_norm": 0.7517241379310344,
"acc_norm_stderr": 0.036001056927277696
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.7486772486772487,
"acc_stderr": 0.0223404823396439,
"acc_norm": 0.7486772486772487,
"acc_norm_stderr": 0.0223404823396439
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5158730158730159,
"acc_stderr": 0.044698818540726076,
"acc_norm": 0.5158730158730159,
"acc_norm_stderr": 0.044698818540726076
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.9064516129032258,
"acc_stderr": 0.016565754668270982,
"acc_norm": 0.9064516129032258,
"acc_norm_stderr": 0.016565754668270982
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6699507389162561,
"acc_stderr": 0.033085304262282574,
"acc_norm": 0.6699507389162561,
"acc_norm_stderr": 0.033085304262282574
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.77,
"acc_stderr": 0.042295258468165044,
"acc_norm": 0.77,
"acc_norm_stderr": 0.042295258468165044
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8848484848484849,
"acc_stderr": 0.024925699798115344,
"acc_norm": 0.8848484848484849,
"acc_norm_stderr": 0.024925699798115344
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9343434343434344,
"acc_stderr": 0.017646526677233335,
"acc_norm": 0.9343434343434344,
"acc_norm_stderr": 0.017646526677233335
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9740932642487047,
"acc_stderr": 0.011464523356953162,
"acc_norm": 0.9740932642487047,
"acc_norm_stderr": 0.011464523356953162
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8102564102564103,
"acc_stderr": 0.019880165406588796,
"acc_norm": 0.8102564102564103,
"acc_norm_stderr": 0.019880165406588796
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.45925925925925926,
"acc_stderr": 0.030384169232350832,
"acc_norm": 0.45925925925925926,
"acc_norm_stderr": 0.030384169232350832
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8445378151260504,
"acc_stderr": 0.023536818625398897,
"acc_norm": 0.8445378151260504,
"acc_norm_stderr": 0.023536818625398897
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5165562913907285,
"acc_stderr": 0.04080244185628972,
"acc_norm": 0.5165562913907285,
"acc_norm_stderr": 0.04080244185628972
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9229357798165138,
"acc_stderr": 0.011434381698911096,
"acc_norm": 0.9229357798165138,
"acc_norm_stderr": 0.011434381698911096
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6620370370370371,
"acc_stderr": 0.03225941352631295,
"acc_norm": 0.6620370370370371,
"acc_norm_stderr": 0.03225941352631295
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9264705882352942,
"acc_stderr": 0.018318855850089678,
"acc_norm": 0.9264705882352942,
"acc_norm_stderr": 0.018318855850089678
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9113924050632911,
"acc_stderr": 0.018498315206865384,
"acc_norm": 0.9113924050632911,
"acc_norm_stderr": 0.018498315206865384
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.8026905829596412,
"acc_stderr": 0.02670985334496796,
"acc_norm": 0.8026905829596412,
"acc_norm_stderr": 0.02670985334496796
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8702290076335878,
"acc_stderr": 0.029473649496907065,
"acc_norm": 0.8702290076335878,
"acc_norm_stderr": 0.029473649496907065
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8760330578512396,
"acc_stderr": 0.030083098716035202,
"acc_norm": 0.8760330578512396,
"acc_norm_stderr": 0.030083098716035202
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8981481481481481,
"acc_stderr": 0.02923927267563275,
"acc_norm": 0.8981481481481481,
"acc_norm_stderr": 0.02923927267563275
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8711656441717791,
"acc_stderr": 0.026321383198783674,
"acc_norm": 0.8711656441717791,
"acc_norm_stderr": 0.026321383198783674
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5446428571428571,
"acc_stderr": 0.04726835553719098,
"acc_norm": 0.5446428571428571,
"acc_norm_stderr": 0.04726835553719098
},
"harness|hendrycksTest-management|5": {
"acc": 0.8640776699029126,
"acc_stderr": 0.0339329572976101,
"acc_norm": 0.8640776699029126,
"acc_norm_stderr": 0.0339329572976101
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9444444444444444,
"acc_stderr": 0.01500631280644693,
"acc_norm": 0.9444444444444444,
"acc_norm_stderr": 0.01500631280644693
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.86,
"acc_stderr": 0.0348735088019777,
"acc_norm": 0.86,
"acc_norm_stderr": 0.0348735088019777
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.9157088122605364,
"acc_stderr": 0.009934966499513791,
"acc_norm": 0.9157088122605364,
"acc_norm_stderr": 0.009934966499513791
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8323699421965318,
"acc_stderr": 0.020110579919734847,
"acc_norm": 0.8323699421965318,
"acc_norm_stderr": 0.020110579919734847
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.8,
"acc_stderr": 0.013378001241813072,
"acc_norm": 0.8,
"acc_norm_stderr": 0.013378001241813072
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8431372549019608,
"acc_stderr": 0.02082375883758091,
"acc_norm": 0.8431372549019608,
"acc_norm_stderr": 0.02082375883758091
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.8006430868167203,
"acc_stderr": 0.022691033780549656,
"acc_norm": 0.8006430868167203,
"acc_norm_stderr": 0.022691033780549656
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8672839506172839,
"acc_stderr": 0.018877353839571842,
"acc_norm": 0.8672839506172839,
"acc_norm_stderr": 0.018877353839571842
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.648936170212766,
"acc_stderr": 0.028473501272963758,
"acc_norm": 0.648936170212766,
"acc_norm_stderr": 0.028473501272963758
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.5912646675358539,
"acc_stderr": 0.01255570134670338,
"acc_norm": 0.5912646675358539,
"acc_norm_stderr": 0.01255570134670338
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8382352941176471,
"acc_stderr": 0.022368672562886747,
"acc_norm": 0.8382352941176471,
"acc_norm_stderr": 0.022368672562886747
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.815359477124183,
"acc_stderr": 0.015697029240757773,
"acc_norm": 0.815359477124183,
"acc_norm_stderr": 0.015697029240757773
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.04265792110940589,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.04265792110940589
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8489795918367347,
"acc_stderr": 0.022923004094736847,
"acc_norm": 0.8489795918367347,
"acc_norm_stderr": 0.022923004094736847
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.9104477611940298,
"acc_stderr": 0.02019067053502792,
"acc_norm": 0.9104477611940298,
"acc_norm_stderr": 0.02019067053502792
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.92,
"acc_stderr": 0.0272659924344291,
"acc_norm": 0.92,
"acc_norm_stderr": 0.0272659924344291
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5843373493975904,
"acc_stderr": 0.03836722176598053,
"acc_norm": 0.5843373493975904,
"acc_norm_stderr": 0.03836722176598053
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8771929824561403,
"acc_stderr": 0.02517298435015577,
"acc_norm": 0.8771929824561403,
"acc_norm_stderr": 0.02517298435015577
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5458996328029376,
"mc1_stderr": 0.017429593091323522,
"mc2": 0.7131962651033679,
"mc2_stderr": 0.014139525056193024
},
"harness|winogrande|5": {
"acc": 0.8342541436464088,
"acc_stderr": 0.01045089954537063
},
"harness|gsm8k|5": {
"acc": 0.7293404094010614,
"acc_stderr": 0.012238245006183411
},
"all": {
"acc": 0.7649892778549832,
"acc_stderr": 0.02823313368050758,
"acc_norm": 0.7681511495490131,
"acc_norm_stderr": 0.028777527908042073,
"mc1": 0.5458996328029376,
"mc1_stderr": 0.017429593091323522,
"mc2": 0.7131962651033679,
"mc2_stderr": 0.014139525056193024
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "f52f7134dd4e8235",
"hash_cont_tokens": "e23c779c4c2dd1ec"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4682,
"non_padded": 5,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "8380af90422a117e",
"hash_cont_tokens": "55da5ba61989a8fe"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40097,
"non_padded": 71,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "9185dc38dcc328ea",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "90fdbbaaf0213cec",
"hash_cont_tokens": "5cc800feae9fa1ad"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "cbe1c711494076b6",
"hash_cont_tokens": "655dbb90034f484a"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "09397035a4a73e5f",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "90c311de52544438",
"hash_cont_tokens": "f77b74d946d7fc02"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "d8fd4e3af4ae46c3",
"hash_cont_tokens": "1ba4b1a158d8bf3f"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "da514a10083e8e97",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "7ccea65975bb46d4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "8ea8585f6adc2650",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "9d07c6e852253252",
"hash_cont_tokens": "78a0ebf66d91c5cf"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "0d3d540477f9eddb",
"hash_cont_tokens": "5a030c95824fdbe5"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "5ebc754afaa1fac8",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "7780b9cde8badacb",
"hash_cont_tokens": "2326dc60d0bc41b6"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "8acec1576892f7ab",
"hash_cont_tokens": "be908364b6f14dd6"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "e0321889f63f18d7",
"hash_cont_tokens": "179280ef597fe1bf"
},
"truncated": 0,
"non_truncated": 145,
"padded": 564,
"non_padded": 16,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "60e497887b9e2608",
"hash_cont_tokens": "95cdcdaf1abd0bd2"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "53adc0607e358206",
"hash_cont_tokens": "6a4818f3c307c346"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "34682f752c1a1ac4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "bb5cc287970e5c14",
"hash_cont_tokens": "36d0d84455f0bdba"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "b12197fdbc9a45f0",
"hash_cont_tokens": "c678f794a9b8ee74"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "36408b638d9d7a8d",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "652bd20e505a2826",
"hash_cont_tokens": "e9c94304326d875c"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "8f4cd01faf05c6f1",
"hash_cont_tokens": "f937a1349eb483eb"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "217861435fcb5576",
"hash_cont_tokens": "8b27dd3907d25b4e"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "bcedb3cf953f812f",
"hash_cont_tokens": "3763cae29e2f938c"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "52affce916d66c97",
"hash_cont_tokens": "fd7b555352d765a4"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "b9d29201856d353d",
"hash_cont_tokens": "61f46d4a209b9aa2"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "9c27af329cb41097",
"hash_cont_tokens": "4e7053e7c19d680d"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "192aef17a8956826",
"hash_cont_tokens": "84d19ae8790476bb"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "a9bc6c02c6f83983",
"hash_cont_tokens": "b119c7b668213a4e"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "14741fa2bd2a4414",
"hash_cont_tokens": "a3b126bc622d571f"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "67f306eb2bf3d2cb",
"hash_cont_tokens": "9abf19ceb76331ff"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "e5cc30c46358588f",
"hash_cont_tokens": "0e2e725ae9a898da"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "10a6536adeac8632",
"hash_cont_tokens": "a94c1dea6d775249"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "d9015aba41ce0d5c",
"hash_cont_tokens": "3832f860859bb86b"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "d5f2109de63c3402",
"hash_cont_tokens": "9fac5a0c364fca8a"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "e0b39eb7c9788cfe",
"hash_cont_tokens": "dc53ed31134ddf3a"
},
"truncated": 0,
"non_truncated": 163,
"padded": 644,
"non_padded": 8,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "643a872ad0f99bb0",
"hash_cont_tokens": "e272b5456d5552d6"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "1232c5b0f524b151",
"hash_cont_tokens": "7119d4642957b1f0"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "f1d76d4a1e08e901",
"hash_cont_tokens": "099d58c66ece3f11"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "cd181ff20fe83b83",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "a3d90d10e2efc569",
"hash_cont_tokens": "bae342d4e82ba8f7"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "4b35576715cc147a",
"hash_cont_tokens": "578c64cbdbb1e0d4"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "1b93703ae85294ee",
"hash_cont_tokens": "79b25f42b3fce0f9"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "6741a26253bd4258",
"hash_cont_tokens": "9d1f3b976417156c"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "730a52e273f8fcf5",
"hash_cont_tokens": "88dab560e1e06d97"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "9e211e939e14b414",
"hash_cont_tokens": "04ea847139fe9393"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "d5761e6be99ed835",
"hash_cont_tokens": "0435ff692ad17e68"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1124,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "fcbc59834dbaa06c",
"hash_cont_tokens": "b852c74e9f8801bd"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "ba5999ee85a41b08",
"hash_cont_tokens": "5db0f6460652d063"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "35652463c3b2d9c6",
"hash_cont_tokens": "c960676ef7f3dbe5"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "af501bc2c58d000f",
"hash_cont_tokens": "3320565f412c4b01"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "5df7af45226ffc3a",
"hash_cont_tokens": "218ed775ef60aab9"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "5dc2e3734f4dd402",
"hash_cont_tokens": "20babf5cc4cc7f3d"
},
"truncated": 0,
"non_truncated": 201,
"padded": 804,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "ed972b660c40d1e4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "ed703c55cc114c98",
"hash_cont_tokens": "dc6d57296bea0882"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "00cf9f5943b1480b",
"hash_cont_tokens": "37f53444db289ed3"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "5e931dfc6ab75011",
"hash_cont_tokens": "71a67034827cd30e"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "bd055e8ba456ab4a",
"hash_cont_tokens": "c93e9c22fa3077a0"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "5cae6c4034435931",
"hash_cont_tokens": "e6f93e4201cbe9f3"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "2f7ca631fba4ce39",
"hash_cont_tokens": "20f72a3f7cfc8e5d"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113445,
"non_padded": 1427,
"num_truncated_few_shots": 0
} |
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 4479070.352066593,
"end_time": 4491813.104552827,
"total_evaluation_time_secondes": "12742.7524862336",
"model_name": "fblgit/UNA-SimpleSmaug-34b-v1beta",
"model_sha": "e1cdc5b02c662c5f29a50d0b22c64a8902ca856b",
"model_dtype": "torch.bfloat16",
"model_size": "64.99 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7192832764505119,
"acc_stderr": 0.013131238126975583,
"acc_norm": 0.7457337883959044,
"acc_norm_stderr": 0.012724999945157736
},
"harness|hellaswag|10": {
"acc": 0.6709818761202948,
"acc_stderr": 0.004688963175758129,
"acc_norm": 0.8673571001792472,
"acc_norm_stderr": 0.003384951803213472
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956911
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7407407407407407,
"acc_stderr": 0.03785714465066653,
"acc_norm": 0.7407407407407407,
"acc_norm_stderr": 0.03785714465066653
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.881578947368421,
"acc_stderr": 0.02629399585547494,
"acc_norm": 0.881578947368421,
"acc_norm_stderr": 0.02629399585547494
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.78,
"acc_stderr": 0.04163331998932261,
"acc_norm": 0.78,
"acc_norm_stderr": 0.04163331998932261
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8,
"acc_stderr": 0.024618298195866514,
"acc_norm": 0.8,
"acc_norm_stderr": 0.024618298195866514
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.9027777777777778,
"acc_stderr": 0.024774516250440182,
"acc_norm": 0.9027777777777778,
"acc_norm_stderr": 0.024774516250440182
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237101,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237101
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7225433526011561,
"acc_stderr": 0.034140140070440354,
"acc_norm": 0.7225433526011561,
"acc_norm_stderr": 0.034140140070440354
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5392156862745098,
"acc_stderr": 0.04959859966384181,
"acc_norm": 0.5392156862745098,
"acc_norm_stderr": 0.04959859966384181
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.79,
"acc_stderr": 0.04093601807403326,
"acc_norm": 0.79,
"acc_norm_stderr": 0.04093601807403326
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.774468085106383,
"acc_stderr": 0.027321078417387533,
"acc_norm": 0.774468085106383,
"acc_norm_stderr": 0.027321078417387533
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5964912280701754,
"acc_stderr": 0.04615186962583707,
"acc_norm": 0.5964912280701754,
"acc_norm_stderr": 0.04615186962583707
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7586206896551724,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.7586206896551724,
"acc_norm_stderr": 0.03565998174135302
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.7380952380952381,
"acc_stderr": 0.02264421261552521,
"acc_norm": 0.7380952380952381,
"acc_norm_stderr": 0.02264421261552521
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5476190476190477,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.5476190476190477,
"acc_norm_stderr": 0.044518079590553275
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.9064516129032258,
"acc_stderr": 0.016565754668270982,
"acc_norm": 0.9064516129032258,
"acc_norm_stderr": 0.016565754668270982
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6847290640394089,
"acc_stderr": 0.03269080871970186,
"acc_norm": 0.6847290640394089,
"acc_norm_stderr": 0.03269080871970186
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.77,
"acc_stderr": 0.042295258468165044,
"acc_norm": 0.77,
"acc_norm_stderr": 0.042295258468165044
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8666666666666667,
"acc_stderr": 0.026544435312706467,
"acc_norm": 0.8666666666666667,
"acc_norm_stderr": 0.026544435312706467
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9242424242424242,
"acc_stderr": 0.018852670234993093,
"acc_norm": 0.9242424242424242,
"acc_norm_stderr": 0.018852670234993093
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9792746113989638,
"acc_stderr": 0.010281417011909025,
"acc_norm": 0.9792746113989638,
"acc_norm_stderr": 0.010281417011909025
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8153846153846154,
"acc_stderr": 0.019671632413100295,
"acc_norm": 0.8153846153846154,
"acc_norm_stderr": 0.019671632413100295
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.030401786406101507,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.030401786406101507
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8529411764705882,
"acc_stderr": 0.023005459446673936,
"acc_norm": 0.8529411764705882,
"acc_norm_stderr": 0.023005459446673936
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5165562913907285,
"acc_stderr": 0.04080244185628972,
"acc_norm": 0.5165562913907285,
"acc_norm_stderr": 0.04080244185628972
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9247706422018349,
"acc_stderr": 0.011308662537571727,
"acc_norm": 0.9247706422018349,
"acc_norm_stderr": 0.011308662537571727
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6574074074074074,
"acc_stderr": 0.032365852526021574,
"acc_norm": 0.6574074074074074,
"acc_norm_stderr": 0.032365852526021574
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9264705882352942,
"acc_stderr": 0.018318855850089678,
"acc_norm": 0.9264705882352942,
"acc_norm_stderr": 0.018318855850089678
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9113924050632911,
"acc_stderr": 0.018498315206865384,
"acc_norm": 0.9113924050632911,
"acc_norm_stderr": 0.018498315206865384
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.820627802690583,
"acc_stderr": 0.0257498195691928,
"acc_norm": 0.820627802690583,
"acc_norm_stderr": 0.0257498195691928
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8625954198473282,
"acc_stderr": 0.030194823996804475,
"acc_norm": 0.8625954198473282,
"acc_norm_stderr": 0.030194823996804475
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8760330578512396,
"acc_stderr": 0.030083098716035216,
"acc_norm": 0.8760330578512396,
"acc_norm_stderr": 0.030083098716035216
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8981481481481481,
"acc_stderr": 0.02923927267563275,
"acc_norm": 0.8981481481481481,
"acc_norm_stderr": 0.02923927267563275
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8711656441717791,
"acc_stderr": 0.026321383198783674,
"acc_norm": 0.8711656441717791,
"acc_norm_stderr": 0.026321383198783674
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5625,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.5625,
"acc_norm_stderr": 0.04708567521880525
},
"harness|hendrycksTest-management|5": {
"acc": 0.8640776699029126,
"acc_stderr": 0.033932957297610096,
"acc_norm": 0.8640776699029126,
"acc_norm_stderr": 0.033932957297610096
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9444444444444444,
"acc_stderr": 0.01500631280644693,
"acc_norm": 0.9444444444444444,
"acc_norm_stderr": 0.01500631280644693
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.85,
"acc_stderr": 0.03588702812826371,
"acc_norm": 0.85,
"acc_norm_stderr": 0.03588702812826371
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.9169859514687101,
"acc_stderr": 0.009866287394639541,
"acc_norm": 0.9169859514687101,
"acc_norm_stderr": 0.009866287394639541
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8265895953757225,
"acc_stderr": 0.02038322955113502,
"acc_norm": 0.8265895953757225,
"acc_norm_stderr": 0.02038322955113502
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.7921787709497207,
"acc_stderr": 0.01357024832508134,
"acc_norm": 0.7921787709497207,
"acc_norm_stderr": 0.01357024832508134
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8594771241830066,
"acc_stderr": 0.019899435463539946,
"acc_norm": 0.8594771241830066,
"acc_norm_stderr": 0.019899435463539946
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.8038585209003215,
"acc_stderr": 0.022552447780478033,
"acc_norm": 0.8038585209003215,
"acc_norm_stderr": 0.022552447780478033
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8703703703703703,
"acc_stderr": 0.018689725721062072,
"acc_norm": 0.8703703703703703,
"acc_norm_stderr": 0.018689725721062072
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6347517730496454,
"acc_stderr": 0.02872386385328127,
"acc_norm": 0.6347517730496454,
"acc_norm_stderr": 0.02872386385328127
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.5951760104302477,
"acc_stderr": 0.012536743830953986,
"acc_norm": 0.5951760104302477,
"acc_norm_stderr": 0.012536743830953986
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8235294117647058,
"acc_stderr": 0.023157468308559345,
"acc_norm": 0.8235294117647058,
"acc_norm_stderr": 0.023157468308559345
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.8186274509803921,
"acc_stderr": 0.015588643495370463,
"acc_norm": 0.8186274509803921,
"acc_norm_stderr": 0.015588643495370463
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7090909090909091,
"acc_stderr": 0.04350271442923243,
"acc_norm": 0.7090909090909091,
"acc_norm_stderr": 0.04350271442923243
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8489795918367347,
"acc_stderr": 0.022923004094736847,
"acc_norm": 0.8489795918367347,
"acc_norm_stderr": 0.022923004094736847
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.9104477611940298,
"acc_stderr": 0.02019067053502792,
"acc_norm": 0.9104477611940298,
"acc_norm_stderr": 0.02019067053502792
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.91,
"acc_stderr": 0.02876234912646613,
"acc_norm": 0.91,
"acc_norm_stderr": 0.02876234912646613
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5783132530120482,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.5783132530120482,
"acc_norm_stderr": 0.03844453181770917
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8596491228070176,
"acc_stderr": 0.026640582539133196,
"acc_norm": 0.8596491228070176,
"acc_norm_stderr": 0.026640582539133196
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5299877600979193,
"mc1_stderr": 0.017471992091697534,
"mc2": 0.7016557407771556,
"mc2_stderr": 0.014224339474805845
},
"harness|winogrande|5": {
"acc": 0.8382004735595896,
"acc_stderr": 0.010350128010292404
},
"harness|gsm8k|5": {
"acc": 0.7247915087187263,
"acc_stderr": 0.012302114305862656
},
"all": {
"acc": 0.7649553475572979,
"acc_stderr": 0.02829491282350785,
"acc_norm": 0.7681713551647662,
"acc_norm_stderr": 0.028841138819719683,
"mc1": 0.5299877600979193,
"mc1_stderr": 0.017471992091697534,
"mc2": 0.7016557407771556,
"mc2_stderr": 0.014224339474805845
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "f52f7134dd4e8235",
"hash_cont_tokens": "e23c779c4c2dd1ec"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4682,
"non_padded": 5,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "8380af90422a117e",
"hash_cont_tokens": "55da5ba61989a8fe"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40097,
"non_padded": 71,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "9185dc38dcc328ea",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "90fdbbaaf0213cec",
"hash_cont_tokens": "5cc800feae9fa1ad"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "cbe1c711494076b6",
"hash_cont_tokens": "655dbb90034f484a"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "09397035a4a73e5f",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "90c311de52544438",
"hash_cont_tokens": "f77b74d946d7fc02"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "d8fd4e3af4ae46c3",
"hash_cont_tokens": "1ba4b1a158d8bf3f"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "da514a10083e8e97",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "7ccea65975bb46d4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "8ea8585f6adc2650",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "9d07c6e852253252",
"hash_cont_tokens": "78a0ebf66d91c5cf"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "0d3d540477f9eddb",
"hash_cont_tokens": "5a030c95824fdbe5"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "5ebc754afaa1fac8",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "7780b9cde8badacb",
"hash_cont_tokens": "2326dc60d0bc41b6"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "8acec1576892f7ab",
"hash_cont_tokens": "be908364b6f14dd6"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "e0321889f63f18d7",
"hash_cont_tokens": "179280ef597fe1bf"
},
"truncated": 0,
"non_truncated": 145,
"padded": 564,
"non_padded": 16,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "60e497887b9e2608",
"hash_cont_tokens": "95cdcdaf1abd0bd2"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "53adc0607e358206",
"hash_cont_tokens": "6a4818f3c307c346"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "34682f752c1a1ac4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "bb5cc287970e5c14",
"hash_cont_tokens": "36d0d84455f0bdba"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "b12197fdbc9a45f0",
"hash_cont_tokens": "c678f794a9b8ee74"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "36408b638d9d7a8d",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "652bd20e505a2826",
"hash_cont_tokens": "e9c94304326d875c"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "8f4cd01faf05c6f1",
"hash_cont_tokens": "f937a1349eb483eb"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "217861435fcb5576",
"hash_cont_tokens": "8b27dd3907d25b4e"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "bcedb3cf953f812f",
"hash_cont_tokens": "3763cae29e2f938c"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "52affce916d66c97",
"hash_cont_tokens": "fd7b555352d765a4"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "b9d29201856d353d",
"hash_cont_tokens": "61f46d4a209b9aa2"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "9c27af329cb41097",
"hash_cont_tokens": "4e7053e7c19d680d"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "192aef17a8956826",
"hash_cont_tokens": "84d19ae8790476bb"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "a9bc6c02c6f83983",
"hash_cont_tokens": "b119c7b668213a4e"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "14741fa2bd2a4414",
"hash_cont_tokens": "a3b126bc622d571f"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "67f306eb2bf3d2cb",
"hash_cont_tokens": "9abf19ceb76331ff"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "e5cc30c46358588f",
"hash_cont_tokens": "0e2e725ae9a898da"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "10a6536adeac8632",
"hash_cont_tokens": "a94c1dea6d775249"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "d9015aba41ce0d5c",
"hash_cont_tokens": "3832f860859bb86b"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "d5f2109de63c3402",
"hash_cont_tokens": "9fac5a0c364fca8a"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "e0b39eb7c9788cfe",
"hash_cont_tokens": "dc53ed31134ddf3a"
},
"truncated": 0,
"non_truncated": 163,
"padded": 644,
"non_padded": 8,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "643a872ad0f99bb0",
"hash_cont_tokens": "e272b5456d5552d6"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "1232c5b0f524b151",
"hash_cont_tokens": "7119d4642957b1f0"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "f1d76d4a1e08e901",
"hash_cont_tokens": "099d58c66ece3f11"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "cd181ff20fe83b83",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "a3d90d10e2efc569",
"hash_cont_tokens": "bae342d4e82ba8f7"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "4b35576715cc147a",
"hash_cont_tokens": "578c64cbdbb1e0d4"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "1b93703ae85294ee",
"hash_cont_tokens": "79b25f42b3fce0f9"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "6741a26253bd4258",
"hash_cont_tokens": "9d1f3b976417156c"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "730a52e273f8fcf5",
"hash_cont_tokens": "88dab560e1e06d97"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "9e211e939e14b414",
"hash_cont_tokens": "04ea847139fe9393"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "d5761e6be99ed835",
"hash_cont_tokens": "0435ff692ad17e68"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1124,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "fcbc59834dbaa06c",
"hash_cont_tokens": "b852c74e9f8801bd"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "ba5999ee85a41b08",
"hash_cont_tokens": "5db0f6460652d063"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "35652463c3b2d9c6",
"hash_cont_tokens": "c960676ef7f3dbe5"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "af501bc2c58d000f",
"hash_cont_tokens": "3320565f412c4b01"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "5df7af45226ffc3a",
"hash_cont_tokens": "218ed775ef60aab9"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "5dc2e3734f4dd402",
"hash_cont_tokens": "20babf5cc4cc7f3d"
},
"truncated": 0,
"non_truncated": 201,
"padded": 804,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "ed972b660c40d1e4",
"hash_cont_tokens": "bcc22fd85dcc85e9"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "ed703c55cc114c98",
"hash_cont_tokens": "dc6d57296bea0882"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "00cf9f5943b1480b",
"hash_cont_tokens": "37f53444db289ed3"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "5e931dfc6ab75011",
"hash_cont_tokens": "71a67034827cd30e"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "bd055e8ba456ab4a",
"hash_cont_tokens": "c93e9c22fa3077a0"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "5cae6c4034435931",
"hash_cont_tokens": "b236c2e3a7474dab"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "2f7ca631fba4ce39",
"hash_cont_tokens": "0bc4be8782703445"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113445,
"non_padded": 1427,
"num_truncated_few_shots": 0
} |
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 1821891.555305934,
"end_time": 1859536.458506719,
"total_evaluation_time_secondes": "37644.90320078493",
"model_name": "ibivibiv/alpaca-dragon-72b-v1",
"model_sha": "4df251a558c53b6b6a4c459045b161951cfc3c4e",
"model_dtype": "torch.float16",
"model_size": "135.9 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7081911262798635,
"acc_stderr": 0.013284525292403503,
"acc_norm": 0.7389078498293515,
"acc_norm_stderr": 0.012835523909473847
},
"harness|hellaswag|10": {
"acc": 0.6983668591913962,
"acc_stderr": 0.0045802887281959775,
"acc_norm": 0.8815972913762199,
"acc_norm_stderr": 0.0032242407223513165
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7185185185185186,
"acc_stderr": 0.038850042458002526,
"acc_norm": 0.7185185185185186,
"acc_norm_stderr": 0.038850042458002526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.868421052631579,
"acc_stderr": 0.02750868953354992,
"acc_norm": 0.868421052631579,
"acc_norm_stderr": 0.02750868953354992
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.82,
"acc_stderr": 0.038612291966536955,
"acc_norm": 0.82,
"acc_norm_stderr": 0.038612291966536955
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8452830188679246,
"acc_stderr": 0.022257075558791282,
"acc_norm": 0.8452830188679246,
"acc_norm_stderr": 0.022257075558791282
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.9305555555555556,
"acc_stderr": 0.02125797482283205,
"acc_norm": 0.9305555555555556,
"acc_norm_stderr": 0.02125797482283205
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.65,
"acc_stderr": 0.04793724854411019,
"acc_norm": 0.65,
"acc_norm_stderr": 0.04793724854411019
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7514450867052023,
"acc_stderr": 0.03295304696818317,
"acc_norm": 0.7514450867052023,
"acc_norm_stderr": 0.03295304696818317
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5588235294117647,
"acc_stderr": 0.049406356306056595,
"acc_norm": 0.5588235294117647,
"acc_norm_stderr": 0.049406356306056595
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.82,
"acc_stderr": 0.03861229196653695,
"acc_norm": 0.82,
"acc_norm_stderr": 0.03861229196653695
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.8042553191489362,
"acc_stderr": 0.025937853139977148,
"acc_norm": 0.8042553191489362,
"acc_norm_stderr": 0.025937853139977148
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.6052631578947368,
"acc_stderr": 0.045981880578165414,
"acc_norm": 0.6052631578947368,
"acc_norm_stderr": 0.045981880578165414
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7931034482758621,
"acc_stderr": 0.03375672449560553,
"acc_norm": 0.7931034482758621,
"acc_norm_stderr": 0.03375672449560553
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.7037037037037037,
"acc_stderr": 0.023517294335963286,
"acc_norm": 0.7037037037037037,
"acc_norm_stderr": 0.023517294335963286
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5873015873015873,
"acc_stderr": 0.04403438954768176,
"acc_norm": 0.5873015873015873,
"acc_norm_stderr": 0.04403438954768176
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8838709677419355,
"acc_stderr": 0.018225757949432306,
"acc_norm": 0.8838709677419355,
"acc_norm_stderr": 0.018225757949432306
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6600985221674877,
"acc_stderr": 0.033327690684107895,
"acc_norm": 0.6600985221674877,
"acc_norm_stderr": 0.033327690684107895
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.82,
"acc_stderr": 0.038612291966536934,
"acc_norm": 0.82,
"acc_norm_stderr": 0.038612291966536934
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8424242424242424,
"acc_stderr": 0.028450388805284357,
"acc_norm": 0.8424242424242424,
"acc_norm_stderr": 0.028450388805284357
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9444444444444444,
"acc_stderr": 0.0163199507007674,
"acc_norm": 0.9444444444444444,
"acc_norm_stderr": 0.0163199507007674
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9896373056994818,
"acc_stderr": 0.007308424386792194,
"acc_norm": 0.9896373056994818,
"acc_norm_stderr": 0.007308424386792194
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8205128205128205,
"acc_stderr": 0.019457390787681782,
"acc_norm": 0.8205128205128205,
"acc_norm_stderr": 0.019457390787681782
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.45925925925925926,
"acc_stderr": 0.030384169232350818,
"acc_norm": 0.45925925925925926,
"acc_norm_stderr": 0.030384169232350818
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8613445378151261,
"acc_stderr": 0.022448264476832586,
"acc_norm": 0.8613445378151261,
"acc_norm_stderr": 0.022448264476832586
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5496688741721855,
"acc_stderr": 0.04062290018683775,
"acc_norm": 0.5496688741721855,
"acc_norm_stderr": 0.04062290018683775
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9284403669724771,
"acc_stderr": 0.01105125524781548,
"acc_norm": 0.9284403669724771,
"acc_norm_stderr": 0.01105125524781548
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6898148148148148,
"acc_stderr": 0.03154696285656627,
"acc_norm": 0.6898148148148148,
"acc_norm_stderr": 0.03154696285656627
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9166666666666666,
"acc_stderr": 0.019398452135813905,
"acc_norm": 0.9166666666666666,
"acc_norm_stderr": 0.019398452135813905
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8987341772151899,
"acc_stderr": 0.019637720526065508,
"acc_norm": 0.8987341772151899,
"acc_norm_stderr": 0.019637720526065508
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7802690582959642,
"acc_stderr": 0.027790177064383602,
"acc_norm": 0.7802690582959642,
"acc_norm_stderr": 0.027790177064383602
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8931297709923665,
"acc_stderr": 0.027096548624883733,
"acc_norm": 0.8931297709923665,
"acc_norm_stderr": 0.027096548624883733
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.9090909090909091,
"acc_stderr": 0.026243194054073885,
"acc_norm": 0.9090909090909091,
"acc_norm_stderr": 0.026243194054073885
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8796296296296297,
"acc_stderr": 0.031457038543062504,
"acc_norm": 0.8796296296296297,
"acc_norm_stderr": 0.031457038543062504
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8404907975460123,
"acc_stderr": 0.02876748172598386,
"acc_norm": 0.8404907975460123,
"acc_norm_stderr": 0.02876748172598386
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5892857142857143,
"acc_stderr": 0.04669510663875191,
"acc_norm": 0.5892857142857143,
"acc_norm_stderr": 0.04669510663875191
},
"harness|hendrycksTest-management|5": {
"acc": 0.8737864077669902,
"acc_stderr": 0.03288180278808628,
"acc_norm": 0.8737864077669902,
"acc_norm_stderr": 0.03288180278808628
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9529914529914529,
"acc_stderr": 0.013866120058594849,
"acc_norm": 0.9529914529914529,
"acc_norm_stderr": 0.013866120058594849
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.86,
"acc_stderr": 0.034873508801977725,
"acc_norm": 0.86,
"acc_norm_stderr": 0.034873508801977725
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.9220945083014048,
"acc_stderr": 0.00958447607669305,
"acc_norm": 0.9220945083014048,
"acc_norm_stderr": 0.00958447607669305
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8352601156069365,
"acc_stderr": 0.019971040982442265,
"acc_norm": 0.8352601156069365,
"acc_norm_stderr": 0.019971040982442265
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.6972067039106146,
"acc_stderr": 0.015366860386397114,
"acc_norm": 0.6972067039106146,
"acc_norm_stderr": 0.015366860386397114
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8562091503267973,
"acc_stderr": 0.020091188936043714,
"acc_norm": 0.8562091503267973,
"acc_norm_stderr": 0.020091188936043714
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.8360128617363344,
"acc_stderr": 0.0210295764646627,
"acc_norm": 0.8360128617363344,
"acc_norm_stderr": 0.0210295764646627
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8703703703703703,
"acc_stderr": 0.01868972572106207,
"acc_norm": 0.8703703703703703,
"acc_norm_stderr": 0.01868972572106207
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6595744680851063,
"acc_stderr": 0.028267657482650158,
"acc_norm": 0.6595744680851063,
"acc_norm_stderr": 0.028267657482650158
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.6069100391134289,
"acc_stderr": 0.012474899613873955,
"acc_norm": 0.6069100391134289,
"acc_norm_stderr": 0.012474899613873955
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8272058823529411,
"acc_stderr": 0.02296606758558181,
"acc_norm": 0.8272058823529411,
"acc_norm_stderr": 0.02296606758558181
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.8186274509803921,
"acc_stderr": 0.015588643495370456,
"acc_norm": 0.8186274509803921,
"acc_norm_stderr": 0.015588643495370456
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7545454545454545,
"acc_stderr": 0.041220665028782855,
"acc_norm": 0.7545454545454545,
"acc_norm_stderr": 0.041220665028782855
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8204081632653061,
"acc_stderr": 0.024573293589585637,
"acc_norm": 0.8204081632653061,
"acc_norm_stderr": 0.024573293589585637
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8855721393034826,
"acc_stderr": 0.022509345325101713,
"acc_norm": 0.8855721393034826,
"acc_norm_stderr": 0.022509345325101713
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.94,
"acc_stderr": 0.02386832565759419,
"acc_norm": 0.94,
"acc_norm_stderr": 0.02386832565759419
},
"harness|hendrycksTest-virology|5": {
"acc": 0.572289156626506,
"acc_stderr": 0.03851597683718533,
"acc_norm": 0.572289156626506,
"acc_norm_stderr": 0.03851597683718533
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8947368421052632,
"acc_stderr": 0.02353755765789256,
"acc_norm": 0.8947368421052632,
"acc_norm_stderr": 0.02353755765789256
},
"harness|truthfulqa:mc|0": {
"mc1": 0.602203182374541,
"mc1_stderr": 0.017133934248559676,
"mc2": 0.7269367196902492,
"mc2_stderr": 0.014514419838318984
},
"harness|winogrande|5": {
"acc": 0.8602999210734017,
"acc_stderr": 0.009743307618298171
},
"harness|gsm8k|5": {
"acc": 0.7763457164518575,
"acc_stderr": 0.011477795578836113
},
"all": {
"acc": 0.7731228139040556,
"acc_stderr": 0.027777455725368695,
"acc_norm": 0.7752168333280753,
"acc_norm_stderr": 0.028328790621382747,
"mc1": 0.602203182374541,
"mc1_stderr": 0.017133934248559676,
"mc2": 0.7269367196902492,
"mc2_stderr": 0.014514419838318984
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "a86de36cca2a19b9",
"hash_cont_tokens": "402adfa0ed1abfe3"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4687,
"non_padded": 0,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "9e46720a9638c8a4",
"hash_cont_tokens": "5856e609c5b49c4f"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40068,
"non_padded": 100,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "00dc12ab60f18dd3",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "5b71f0137904b4fd",
"hash_cont_tokens": "f9dae0f98ef7c0f2"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "eef2a8a18c3925c0",
"hash_cont_tokens": "dff84e206d2f1e0d"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "c7c3799588097fc1",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "0da466ef69c2c211",
"hash_cont_tokens": "b81dd170f83789d1"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "ea7865285fa63718",
"hash_cont_tokens": "85c3400292af3bb8"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "551968a6bc1e1c69",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "12804011678b362d",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "4bd091031fc263d9",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "7532f5d07c6debfd",
"hash_cont_tokens": "e5cb48f872b79ee7"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "091beb94392a1731",
"hash_cont_tokens": "40862171591ad909"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "d09375fff8e916d5",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "ac72ede0b36aabf2",
"hash_cont_tokens": "36bb2a47e8ff1bd8"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "0a3072da09eaf315",
"hash_cont_tokens": "433685e9aa542c2d"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "45dcd2a8820fad20",
"hash_cont_tokens": "f086b291b3aa0628"
},
"truncated": 0,
"non_truncated": 145,
"padded": 576,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "73321a8a08f43d2f",
"hash_cont_tokens": "4f402da407619e4d"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "ef395842ce6008ce",
"hash_cont_tokens": "80d8e3e54d900608"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "f98b91cdb7b86749",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "94f2ac3fa39ac4c7",
"hash_cont_tokens": "e07819899bd63630"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "1036da676d11ad62",
"hash_cont_tokens": "eb6259a94d61e372"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "f40b47b509c459ae",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "ca2a0a3cdee71062",
"hash_cont_tokens": "c3336566c025bc59"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "d2a95c354bd5bce3",
"hash_cont_tokens": "999a32d098465441"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "477712b69094d77b",
"hash_cont_tokens": "361410848e01f8ed"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "ea00f00108f471d1",
"hash_cont_tokens": "18f9ae57b2444806"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "6771092a57f1064b",
"hash_cont_tokens": "a13496e646060699"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "6434ce770cc3a07d",
"hash_cont_tokens": "791a7a25f0571e59"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "9b84202a0e20279e",
"hash_cont_tokens": "9677b0687811cf73"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "cbd1c4c25d9a95e1",
"hash_cont_tokens": "6393201d9136920e"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "a9d9974081f33401",
"hash_cont_tokens": "17caccbb3a38c7bf"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "4ea19e6b2da621ca",
"hash_cont_tokens": "7128e2eeb930d3b3"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "93e06ed8cb44fcb2",
"hash_cont_tokens": "48e22ae63ee54721"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "10b1be4021766536",
"hash_cont_tokens": "0f40704815d5b3f6"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "6e511aceb2a5cc1f",
"hash_cont_tokens": "a9fdf5917bdddc9b"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "f2250000a60c4675",
"hash_cont_tokens": "c63e45a81fbe97b2"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "1d37f2053687bf09",
"hash_cont_tokens": "9df89edb95ea3c08"
},
"truncated": 0,
"non_truncated": 108,
"padded": 428,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "50a65f6db7781df6",
"hash_cont_tokens": "5b4f21454680a984"
},
"truncated": 0,
"non_truncated": 163,
"padded": 652,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "0214f9e954e7fcf7",
"hash_cont_tokens": "0c2fc7f9e9101fbb"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "3fc286ea903dc9e1",
"hash_cont_tokens": "1279a23b3bc7b32c"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "dfa2c9b7866c93e6",
"hash_cont_tokens": "be76778b3b861344"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "642d259108067cec",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "1f55d640e75559b3",
"hash_cont_tokens": "c61a0f86b50f0556"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "11599cd92aca75c2",
"hash_cont_tokens": "a208a34c74088f6c"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "9965b932ec67e2ff",
"hash_cont_tokens": "996ce7a5b6c4aef1"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "1da6449a92c60335",
"hash_cont_tokens": "9d4280b06a73f2ad"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "eacd0118cde3a6b6",
"hash_cont_tokens": "9a708d21688a0b16"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "900a2e857049c7fb",
"hash_cont_tokens": "ed0ff6b6c4caf978"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "2368119814fe27da",
"hash_cont_tokens": "4fd1a023ef90b43a"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1120,
"non_padded": 8,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "ba0b150921d1354f",
"hash_cont_tokens": "d2c1c75d7c0e6ec5"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "b80d6f9095fb702f",
"hash_cont_tokens": "ff4c3ef8a56efe40"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "10151b922fe9fdba",
"hash_cont_tokens": "b4566ef91a66db7d"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "ed0c12fa575d30f6",
"hash_cont_tokens": "b713ae56c89df822"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "49436381f9054ab9",
"hash_cont_tokens": "89baef8c4b642ed0"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "9aff724e413681b7",
"hash_cont_tokens": "b92ed9d8dde61395"
},
"truncated": 0,
"non_truncated": 201,
"padded": 784,
"non_padded": 20,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "9103b692a946fc09",
"hash_cont_tokens": "bc75e4dffef3dc0e"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "6dc6ade73ee63cae",
"hash_cont_tokens": "1c1bf88d7c979ef5"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "92ed8eba1ceb58b4",
"hash_cont_tokens": "9fbfaba067301be2"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "405dc01724068f4f",
"hash_cont_tokens": "2aa05ab785b97e1d"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "e512c1d089d1c425",
"hash_cont_tokens": "e5da1ddee7e80213"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "3b8275f3fce8067b",
"hash_cont_tokens": "48bc397569b091d9"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "44ee6f861f86ab7d",
"hash_cont_tokens": "f629e02f9370f1e9"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113413,
"non_padded": 1459,
"num_truncated_few_shots": 0
} |
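The per-task summary blocks above share a fixed accounting schema: `truncated` + `non_truncated` counts examples, `padded` + `non_padded` counts scored requests, and the ratio between the two reflects the task type (four answer choices per MMLU example, e.g. econometrics with 114 examples and 456 requests; two requests per winogrande example; one unpadded generative request per gsm8k example). Below is a minimal sketch of that consistency check, assuming one record's `summary_tasks` cell has been extracted to a standalone file; the file name `summary_tasks.json` is hypothetical.

```python
import json

# Minimal sketch: sanity-check the request accounting of one record's
# summary_tasks block. Assumes the cell was saved as summary_tasks.json
# (a hypothetical file name, not produced by lighteval itself).
with open("summary_tasks.json") as f:
    summary_tasks = json.load(f)

for task, s in summary_tasks.items():
    examples = s["truncated"] + s["non_truncated"]
    requests = s["padded"] + s["non_padded"]
    # MMLU subtasks issue one request per answer choice (4x examples),
    # winogrande 2x, gsm8k 1x (generative, hence padded == 0); tasks with
    # a variable choice count (arc, truthfulqa:mc) give non-integer ratios.
    print(f"{task}: {examples} examples, {requests} requests "
          f"({requests / examples:.2f} per example)")
```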
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 2991189.749399989,
"end_time": 2998307.50026319,
"total_evaluation_time_secondes": "7117.750863200985",
"model_name": "saltlux/luxia-21.4b-alignment-v1.0",
"model_sha": "910c73192c30fb51dc94f69777b2ec7cc3a4465b",
"model_dtype": "torch.float16",
"model_size": "47.9 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7704778156996587,
"acc_stderr": 0.012288926760890797,
"acc_norm": 0.7773037542662116,
"acc_norm_stderr": 0.012158314774829924
},
"harness|hellaswag|10": {
"acc": 0.8136825333598885,
"acc_stderr": 0.003885668963126075,
"acc_norm": 0.9182433778131847,
"acc_norm_stderr": 0.002734336054245144
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6518518518518519,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.6518518518518519,
"acc_norm_stderr": 0.041153246103369526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.756578947368421,
"acc_stderr": 0.034923496688842384,
"acc_norm": 0.756578947368421,
"acc_norm_stderr": 0.034923496688842384
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.74,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.74,
"acc_norm_stderr": 0.044084400227680794
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7358490566037735,
"acc_stderr": 0.027134291628741713,
"acc_norm": 0.7358490566037735,
"acc_norm_stderr": 0.027134291628741713
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8402777777777778,
"acc_stderr": 0.03063557897209328,
"acc_norm": 0.8402777777777778,
"acc_norm_stderr": 0.03063557897209328
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.03599586301247077,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.03599586301247077
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4019607843137255,
"acc_stderr": 0.04878608714466996,
"acc_norm": 0.4019607843137255,
"acc_norm_stderr": 0.04878608714466996
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.82,
"acc_stderr": 0.03861229196653695,
"acc_norm": 0.82,
"acc_norm_stderr": 0.03861229196653695
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.6851063829787234,
"acc_stderr": 0.030363582197238174,
"acc_norm": 0.6851063829787234,
"acc_norm_stderr": 0.030363582197238174
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5614035087719298,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.5614035087719298,
"acc_norm_stderr": 0.04668000738510455
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.6275862068965518,
"acc_stderr": 0.04028731532947558,
"acc_norm": 0.6275862068965518,
"acc_norm_stderr": 0.04028731532947558
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.025733641991838987,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.025733641991838987
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4603174603174603,
"acc_stderr": 0.04458029125470973,
"acc_norm": 0.4603174603174603,
"acc_norm_stderr": 0.04458029125470973
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8354838709677419,
"acc_stderr": 0.02109084774593932,
"acc_norm": 0.8354838709677419,
"acc_norm_stderr": 0.02109084774593932
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5911330049261084,
"acc_stderr": 0.03459058815883232,
"acc_norm": 0.5911330049261084,
"acc_norm_stderr": 0.03459058815883232
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8242424242424242,
"acc_stderr": 0.02972094300622445,
"acc_norm": 0.8242424242424242,
"acc_norm_stderr": 0.02972094300622445
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8484848484848485,
"acc_stderr": 0.02554565042660362,
"acc_norm": 0.8484848484848485,
"acc_norm_stderr": 0.02554565042660362
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.02293514405391943,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.02293514405391943
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.7051282051282052,
"acc_stderr": 0.0231193627582323,
"acc_norm": 0.7051282051282052,
"acc_norm_stderr": 0.0231193627582323
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.37407407407407406,
"acc_stderr": 0.029502861128955286,
"acc_norm": 0.37407407407407406,
"acc_norm_stderr": 0.029502861128955286
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7689075630252101,
"acc_stderr": 0.027381406927868893,
"acc_norm": 0.7689075630252101,
"acc_norm_stderr": 0.027381406927868893
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.44370860927152317,
"acc_stderr": 0.04056527902281731,
"acc_norm": 0.44370860927152317,
"acc_norm_stderr": 0.04056527902281731
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8587155963302753,
"acc_stderr": 0.014933868987028072,
"acc_norm": 0.8587155963302753,
"acc_norm_stderr": 0.014933868987028072
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5925925925925926,
"acc_stderr": 0.03350991604696044,
"acc_norm": 0.5925925925925926,
"acc_norm_stderr": 0.03350991604696044
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8676470588235294,
"acc_stderr": 0.02378429752091885,
"acc_norm": 0.8676470588235294,
"acc_norm_stderr": 0.02378429752091885
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8481012658227848,
"acc_stderr": 0.023363878096632443,
"acc_norm": 0.8481012658227848,
"acc_norm_stderr": 0.023363878096632443
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7354260089686099,
"acc_stderr": 0.029605103217038325,
"acc_norm": 0.7354260089686099,
"acc_norm_stderr": 0.029605103217038325
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6793893129770993,
"acc_stderr": 0.04093329229834278,
"acc_norm": 0.6793893129770993,
"acc_norm_stderr": 0.04093329229834278
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8429752066115702,
"acc_stderr": 0.03321244842547128,
"acc_norm": 0.8429752066115702,
"acc_norm_stderr": 0.03321244842547128
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252626,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252626
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.754601226993865,
"acc_stderr": 0.03380939813943354,
"acc_norm": 0.754601226993865,
"acc_norm_stderr": 0.03380939813943354
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.45535714285714285,
"acc_stderr": 0.04726835553719099,
"acc_norm": 0.45535714285714285,
"acc_norm_stderr": 0.04726835553719099
},
"harness|hendrycksTest-management|5": {
"acc": 0.8446601941747572,
"acc_stderr": 0.03586594738573974,
"acc_norm": 0.8446601941747572,
"acc_norm_stderr": 0.03586594738573974
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8974358974358975,
"acc_stderr": 0.019875655027867447,
"acc_norm": 0.8974358974358975,
"acc_norm_stderr": 0.019875655027867447
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.72,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.72,
"acc_norm_stderr": 0.045126085985421276
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8122605363984674,
"acc_stderr": 0.013964393769899115,
"acc_norm": 0.8122605363984674,
"acc_norm_stderr": 0.013964393769899115
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7196531791907514,
"acc_stderr": 0.02418242749657761,
"acc_norm": 0.7196531791907514,
"acc_norm_stderr": 0.02418242749657761
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4748603351955307,
"acc_stderr": 0.016701350842682632,
"acc_norm": 0.4748603351955307,
"acc_norm_stderr": 0.016701350842682632
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7843137254901961,
"acc_stderr": 0.02355083135199509,
"acc_norm": 0.7843137254901961,
"acc_norm_stderr": 0.02355083135199509
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7363344051446945,
"acc_stderr": 0.02502553850053234,
"acc_norm": 0.7363344051446945,
"acc_norm_stderr": 0.02502553850053234
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.023132376234543353,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.023132376234543353
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.549645390070922,
"acc_stderr": 0.02968010556502904,
"acc_norm": 0.549645390070922,
"acc_norm_stderr": 0.02968010556502904
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.48826597131681876,
"acc_stderr": 0.012766719019686724,
"acc_norm": 0.48826597131681876,
"acc_norm_stderr": 0.012766719019686724
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6838235294117647,
"acc_stderr": 0.028245687391462927,
"acc_norm": 0.6838235294117647,
"acc_norm_stderr": 0.028245687391462927
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6813725490196079,
"acc_stderr": 0.018850084696468723,
"acc_norm": 0.6813725490196079,
"acc_norm_stderr": 0.018850084696468723
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.746938775510204,
"acc_stderr": 0.027833023871399687,
"acc_norm": 0.746938775510204,
"acc_norm_stderr": 0.027833023871399687
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8308457711442786,
"acc_stderr": 0.026508590656233268,
"acc_norm": 0.8308457711442786,
"acc_norm_stderr": 0.026508590656233268
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.035887028128263686,
"acc_norm": 0.85,
"acc_norm_stderr": 0.035887028128263686
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.038695433234721015,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.038695433234721015
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8070175438596491,
"acc_stderr": 0.030267457554898458,
"acc_norm": 0.8070175438596491,
"acc_norm_stderr": 0.030267457554898458
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6585067319461444,
"mc1_stderr": 0.016600688619950826,
"mc2": 0.7920184341773266,
"mc2_stderr": 0.013279677093259622
},
"harness|winogrande|5": {
"acc": 0.8737174427782163,
"acc_stderr": 0.009335559129908468
},
"harness|gsm8k|5": {
"acc": 0.6224412433661866,
"acc_stderr": 0.013353150666358535
},
"all": {
"acc": 0.686346820158109,
"acc_stderr": 0.03136390908463311,
"acc_norm": 0.6861421038393235,
"acc_norm_stderr": 0.03202081016053238,
"mc1": 0.6585067319461444,
"mc1_stderr": 0.016600688619950826,
"mc2": 0.7920184341773266,
"mc2_stderr": 0.013279677093259622
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "61360774732b48c5",
"hash_cont_tokens": "2c7776913d1cd316"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4664,
"non_padded": 23,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "dfafe55b3ee2e03f",
"hash_cont_tokens": "47ccd02cc15ea606"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40007,
"non_padded": 161,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "02f647d1e7128cbe",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "94b062ef497b6da1",
"hash_cont_tokens": "3448d00acc7a11c6"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "09648295653479cc",
"hash_cont_tokens": "9b5285416fa903e2"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "d1b7b508398e832b",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "36d0486204081242",
"hash_cont_tokens": "0382995cfcc24e3e"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1056,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "abb1a1e52668c5d2",
"hash_cont_tokens": "09ef20d27e0286fe"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "61e2034b4c6e4654",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "82070fd3e9c11558",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "cddd953fdc669e9c",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "b0c7c0bb0f29fd40",
"hash_cont_tokens": "2115091b39764e96"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "a110634091f861c3",
"hash_cont_tokens": "253e8f65a34d2f2b"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "b6add64b6e73a687",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "bc29b24a542d16e5",
"hash_cont_tokens": "863770146d3e3341"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "90bc926de760a574",
"hash_cont_tokens": "547784fe0135a15c"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "f950537458fba1cd",
"hash_cont_tokens": "545e7978a9a2e921"
},
"truncated": 0,
"non_truncated": 145,
"padded": 576,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "e449f8e0bbbffd70",
"hash_cont_tokens": "6220dafecd3e71a1"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "d6375de8b6e2852d",
"hash_cont_tokens": "9e1c83b748056f05"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "58fc4ea768c14475",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "ddd3edff37564eb2",
"hash_cont_tokens": "c0ee938431d4cce1"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "4afbcd19f348405b",
"hash_cont_tokens": "2fd86b22bfa1c8cb"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "6915f846cbbc1376",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "8c3bf3dd4b29788b",
"hash_cont_tokens": "8d52dfdbe7373dec"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "0833402461c795d7",
"hash_cont_tokens": "7daa2bbedae272e1"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "e655d173f257ba00",
"hash_cont_tokens": "530e7985f90589ad"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "ef9a976db5ae26bf",
"hash_cont_tokens": "8abfdac40b0aa157"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "47168004b37b8b1b",
"hash_cont_tokens": "0450a3d8e715e926"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "cc31b962af40a3a9",
"hash_cont_tokens": "3e477b8a15ec619c"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "e25d3a4bd30d4b9b",
"hash_cont_tokens": "f0648b1ae17e3c3f"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "81f1d60b0f28f1dd",
"hash_cont_tokens": "71a621b85c8384ec"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "cd8eff0a7fea8499",
"hash_cont_tokens": "507dec89f16c35ea"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "7e8a7b287cc950ce",
"hash_cont_tokens": "fe66e65deac902bb"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "637b13e89f48b91d",
"hash_cont_tokens": "7fe519011d639dc8"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "d79c0f32320628be",
"hash_cont_tokens": "77ba99656e04ddd0"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "0eb947a1d4a571d3",
"hash_cont_tokens": "bc8f34ada52ca31e"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "144b6f5b0ee2c132",
"hash_cont_tokens": "d4b66c0f10b911b8"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "5742978f580307e8",
"hash_cont_tokens": "f7ea9e092aff54a4"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "695ba87b872556f3",
"hash_cont_tokens": "9e305ec3d994de5c"
},
"truncated": 0,
"non_truncated": 163,
"padded": 648,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "76e0f333901e7a92",
"hash_cont_tokens": "85f6ff4f34ded537"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "cc757739b8954b9b",
"hash_cont_tokens": "1f24f5bf907f5f28"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "4f2988aaab0a7e07",
"hash_cont_tokens": "37062ffd1e129b49"
},
"truncated": 0,
"non_truncated": 234,
"padded": 932,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "9f045a466a2fa245",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "8abb58943afb732c",
"hash_cont_tokens": "64725e71e0bff006"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "034c4d68002216dc",
"hash_cont_tokens": "d73b7e792a1de62d"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1368,
"non_padded": 16,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "1cf1ba5ed283c147",
"hash_cont_tokens": "291bc548e95ea24c"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "4858f94c557e069a",
"hash_cont_tokens": "4159368fbefa62ba"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "bc1a814aa2ec0d97",
"hash_cont_tokens": "b3758c79335b5e25"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1240,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "d82f5763c7c0aeab",
"hash_cont_tokens": "c7aff90b52b3c210"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "4b98da57fa827859",
"hash_cont_tokens": "8fd4fe19db20b33f"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "a1223f558c8e6a22",
"hash_cont_tokens": "70fdfc3a3cdab2b2"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "25f9537254439cce",
"hash_cont_tokens": "2662c15f3eee1572"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "68981a198cecbd8b",
"hash_cont_tokens": "7b998c3f691a5888"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "2b9983200889161c",
"hash_cont_tokens": "9884d7f2589a4eec"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "6b5b6702fe4cedad",
"hash_cont_tokens": "87576f25f4731ef0"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "1cf3f4312411d9f5",
"hash_cont_tokens": "bba9af89c33fad2f"
},
"truncated": 0,
"non_truncated": 201,
"padded": 776,
"non_padded": 28,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "623fded50b4331b3",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "cf094bede564ab54",
"hash_cont_tokens": "16a5fb37a6047671"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "678d2329b9939306",
"hash_cont_tokens": "65fd69dde784be8d"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "373018952b5d17a6",
"hash_cont_tokens": "48248f31331ca20c"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "b119b996a57751b5",
"hash_cont_tokens": "5ffbcf0b87304360"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "821f45bcb336938c",
"hash_cont_tokens": "3f75cab3e2e46402"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "6925ffa4c3f930fe",
"hash_cont_tokens": "2b89653b20cd6c4a"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113301,
"non_padded": 1571,
"num_truncated_few_shots": 0
} |
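Each record's `results` cell closes with an `all` block, which appears to be the unweighted mean of every metric over the tasks reporting it; that would explain why `all.mc1` above coincides exactly with the truthfulqa-only `mc1` (0.6585...). The following sketch recomputes the aggregate under that assumption; the file name `results.json` is hypothetical.

```python
import json

# Sketch: recompute the "all" aggregate from one record's results cell,
# assuming it is the per-metric unweighted mean across tasks. Assumes the
# cell was saved as results.json (hypothetical file name).
with open("results.json") as f:
    results = json.load(f)

metrics = {}
for task, scores in results.items():
    if task == "all":
        continue  # skip the stored aggregate itself
    for metric, value in scores.items():
        metrics.setdefault(metric, []).append(value)

for metric, values in sorted(metrics.items()):
    mean = sum(values) / len(values)
    print(f"{metric}: mean over {len(values)} tasks = {mean:.6f}")
```

Metrics reported by only a subset of tasks (mc1/mc2 from truthfulqa:mc) average over that subset alone, so a single-task metric passes through unchanged.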
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 3017516.883374964,
"end_time": 3024496.389715243,
"total_evaluation_time_secondes": "6979.506340279244",
"model_name": "saltlux/luxia-21.4b-alignment-v1.0",
"model_sha": "910c73192c30fb51dc94f69777b2ec7cc3a4465b",
"model_dtype": "torch.float16",
"model_size": "47.9 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7704778156996587,
"acc_stderr": 0.012288926760890797,
"acc_norm": 0.7773037542662116,
"acc_norm_stderr": 0.012158314774829924
},
"harness|hellaswag|10": {
"acc": 0.8136825333598885,
"acc_stderr": 0.003885668963126075,
"acc_norm": 0.9182433778131847,
"acc_norm_stderr": 0.002734336054245144
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6518518518518519,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.6518518518518519,
"acc_norm_stderr": 0.041153246103369526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.756578947368421,
"acc_stderr": 0.034923496688842384,
"acc_norm": 0.756578947368421,
"acc_norm_stderr": 0.034923496688842384
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.74,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.74,
"acc_norm_stderr": 0.044084400227680794
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7358490566037735,
"acc_stderr": 0.027134291628741713,
"acc_norm": 0.7358490566037735,
"acc_norm_stderr": 0.027134291628741713
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8402777777777778,
"acc_stderr": 0.03063557897209328,
"acc_norm": 0.8402777777777778,
"acc_norm_stderr": 0.03063557897209328
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.03599586301247077,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.03599586301247077
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4019607843137255,
"acc_stderr": 0.04878608714466996,
"acc_norm": 0.4019607843137255,
"acc_norm_stderr": 0.04878608714466996
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.82,
"acc_stderr": 0.03861229196653695,
"acc_norm": 0.82,
"acc_norm_stderr": 0.03861229196653695
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.6851063829787234,
"acc_stderr": 0.030363582197238174,
"acc_norm": 0.6851063829787234,
"acc_norm_stderr": 0.030363582197238174
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5614035087719298,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.5614035087719298,
"acc_norm_stderr": 0.04668000738510455
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.6275862068965518,
"acc_stderr": 0.04028731532947558,
"acc_norm": 0.6275862068965518,
"acc_norm_stderr": 0.04028731532947558
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.025733641991838987,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.025733641991838987
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4603174603174603,
"acc_stderr": 0.04458029125470973,
"acc_norm": 0.4603174603174603,
"acc_norm_stderr": 0.04458029125470973
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8354838709677419,
"acc_stderr": 0.02109084774593932,
"acc_norm": 0.8354838709677419,
"acc_norm_stderr": 0.02109084774593932
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5911330049261084,
"acc_stderr": 0.03459058815883232,
"acc_norm": 0.5911330049261084,
"acc_norm_stderr": 0.03459058815883232
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8242424242424242,
"acc_stderr": 0.02972094300622445,
"acc_norm": 0.8242424242424242,
"acc_norm_stderr": 0.02972094300622445
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8484848484848485,
"acc_stderr": 0.02554565042660362,
"acc_norm": 0.8484848484848485,
"acc_norm_stderr": 0.02554565042660362
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.02293514405391943,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.02293514405391943
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.7051282051282052,
"acc_stderr": 0.0231193627582323,
"acc_norm": 0.7051282051282052,
"acc_norm_stderr": 0.0231193627582323
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.37407407407407406,
"acc_stderr": 0.029502861128955286,
"acc_norm": 0.37407407407407406,
"acc_norm_stderr": 0.029502861128955286
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7689075630252101,
"acc_stderr": 0.027381406927868893,
"acc_norm": 0.7689075630252101,
"acc_norm_stderr": 0.027381406927868893
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.44370860927152317,
"acc_stderr": 0.04056527902281731,
"acc_norm": 0.44370860927152317,
"acc_norm_stderr": 0.04056527902281731
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8587155963302753,
"acc_stderr": 0.014933868987028072,
"acc_norm": 0.8587155963302753,
"acc_norm_stderr": 0.014933868987028072
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5925925925925926,
"acc_stderr": 0.03350991604696044,
"acc_norm": 0.5925925925925926,
"acc_norm_stderr": 0.03350991604696044
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8676470588235294,
"acc_stderr": 0.02378429752091885,
"acc_norm": 0.8676470588235294,
"acc_norm_stderr": 0.02378429752091885
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8481012658227848,
"acc_stderr": 0.023363878096632443,
"acc_norm": 0.8481012658227848,
"acc_norm_stderr": 0.023363878096632443
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7354260089686099,
"acc_stderr": 0.029605103217038325,
"acc_norm": 0.7354260089686099,
"acc_norm_stderr": 0.029605103217038325
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6793893129770993,
"acc_stderr": 0.04093329229834278,
"acc_norm": 0.6793893129770993,
"acc_norm_stderr": 0.04093329229834278
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8429752066115702,
"acc_stderr": 0.03321244842547128,
"acc_norm": 0.8429752066115702,
"acc_norm_stderr": 0.03321244842547128
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252626,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252626
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.754601226993865,
"acc_stderr": 0.03380939813943354,
"acc_norm": 0.754601226993865,
"acc_norm_stderr": 0.03380939813943354
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.45535714285714285,
"acc_stderr": 0.04726835553719099,
"acc_norm": 0.45535714285714285,
"acc_norm_stderr": 0.04726835553719099
},
"harness|hendrycksTest-management|5": {
"acc": 0.8446601941747572,
"acc_stderr": 0.03586594738573974,
"acc_norm": 0.8446601941747572,
"acc_norm_stderr": 0.03586594738573974
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8974358974358975,
"acc_stderr": 0.019875655027867447,
"acc_norm": 0.8974358974358975,
"acc_norm_stderr": 0.019875655027867447
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.72,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.72,
"acc_norm_stderr": 0.045126085985421276
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8122605363984674,
"acc_stderr": 0.013964393769899115,
"acc_norm": 0.8122605363984674,
"acc_norm_stderr": 0.013964393769899115
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7196531791907514,
"acc_stderr": 0.02418242749657761,
"acc_norm": 0.7196531791907514,
"acc_norm_stderr": 0.02418242749657761
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4748603351955307,
"acc_stderr": 0.016701350842682632,
"acc_norm": 0.4748603351955307,
"acc_norm_stderr": 0.016701350842682632
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7843137254901961,
"acc_stderr": 0.02355083135199509,
"acc_norm": 0.7843137254901961,
"acc_norm_stderr": 0.02355083135199509
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7363344051446945,
"acc_stderr": 0.02502553850053234,
"acc_norm": 0.7363344051446945,
"acc_norm_stderr": 0.02502553850053234
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.023132376234543353,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.023132376234543353
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.549645390070922,
"acc_stderr": 0.02968010556502904,
"acc_norm": 0.549645390070922,
"acc_norm_stderr": 0.02968010556502904
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.48826597131681876,
"acc_stderr": 0.012766719019686724,
"acc_norm": 0.48826597131681876,
"acc_norm_stderr": 0.012766719019686724
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6838235294117647,
"acc_stderr": 0.028245687391462927,
"acc_norm": 0.6838235294117647,
"acc_norm_stderr": 0.028245687391462927
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6813725490196079,
"acc_stderr": 0.018850084696468723,
"acc_norm": 0.6813725490196079,
"acc_norm_stderr": 0.018850084696468723
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.746938775510204,
"acc_stderr": 0.027833023871399687,
"acc_norm": 0.746938775510204,
"acc_norm_stderr": 0.027833023871399687
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8308457711442786,
"acc_stderr": 0.026508590656233268,
"acc_norm": 0.8308457711442786,
"acc_norm_stderr": 0.026508590656233268
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.035887028128263686,
"acc_norm": 0.85,
"acc_norm_stderr": 0.035887028128263686
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.038695433234721015,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.038695433234721015
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8070175438596491,
"acc_stderr": 0.030267457554898458,
"acc_norm": 0.8070175438596491,
"acc_norm_stderr": 0.030267457554898458
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6585067319461444,
"mc1_stderr": 0.016600688619950826,
"mc2": 0.7920184341773266,
"mc2_stderr": 0.013279677093259622
},
"harness|winogrande|5": {
"acc": 0.8737174427782163,
"acc_stderr": 0.009335559129908468
},
"harness|gsm8k|5": {
"acc": 0.6224412433661866,
"acc_stderr": 0.013353150666358535
},
"all": {
"acc": 0.686346820158109,
"acc_stderr": 0.03136390908463311,
"acc_norm": 0.6861421038393235,
"acc_norm_stderr": 0.03202081016053238,
"mc1": 0.6585067319461444,
"mc1_stderr": 0.016600688619950826,
"mc2": 0.7920184341773266,
"mc2_stderr": 0.013279677093259622
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "61360774732b48c5",
"hash_cont_tokens": "2c7776913d1cd316"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4664,
"non_padded": 23,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "dfafe55b3ee2e03f",
"hash_cont_tokens": "47ccd02cc15ea606"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40007,
"non_padded": 161,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "02f647d1e7128cbe",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "94b062ef497b6da1",
"hash_cont_tokens": "3448d00acc7a11c6"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "09648295653479cc",
"hash_cont_tokens": "9b5285416fa903e2"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "d1b7b508398e832b",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "36d0486204081242",
"hash_cont_tokens": "0382995cfcc24e3e"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1056,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "abb1a1e52668c5d2",
"hash_cont_tokens": "09ef20d27e0286fe"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "61e2034b4c6e4654",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "82070fd3e9c11558",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "cddd953fdc669e9c",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "b0c7c0bb0f29fd40",
"hash_cont_tokens": "2115091b39764e96"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "a110634091f861c3",
"hash_cont_tokens": "253e8f65a34d2f2b"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "b6add64b6e73a687",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "bc29b24a542d16e5",
"hash_cont_tokens": "863770146d3e3341"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "90bc926de760a574",
"hash_cont_tokens": "547784fe0135a15c"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "f950537458fba1cd",
"hash_cont_tokens": "545e7978a9a2e921"
},
"truncated": 0,
"non_truncated": 145,
"padded": 576,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "e449f8e0bbbffd70",
"hash_cont_tokens": "6220dafecd3e71a1"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "d6375de8b6e2852d",
"hash_cont_tokens": "9e1c83b748056f05"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "58fc4ea768c14475",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "ddd3edff37564eb2",
"hash_cont_tokens": "c0ee938431d4cce1"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "4afbcd19f348405b",
"hash_cont_tokens": "2fd86b22bfa1c8cb"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "6915f846cbbc1376",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "8c3bf3dd4b29788b",
"hash_cont_tokens": "8d52dfdbe7373dec"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "0833402461c795d7",
"hash_cont_tokens": "7daa2bbedae272e1"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "e655d173f257ba00",
"hash_cont_tokens": "530e7985f90589ad"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "ef9a976db5ae26bf",
"hash_cont_tokens": "8abfdac40b0aa157"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "47168004b37b8b1b",
"hash_cont_tokens": "0450a3d8e715e926"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "cc31b962af40a3a9",
"hash_cont_tokens": "3e477b8a15ec619c"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "e25d3a4bd30d4b9b",
"hash_cont_tokens": "f0648b1ae17e3c3f"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "81f1d60b0f28f1dd",
"hash_cont_tokens": "71a621b85c8384ec"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "cd8eff0a7fea8499",
"hash_cont_tokens": "507dec89f16c35ea"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "7e8a7b287cc950ce",
"hash_cont_tokens": "fe66e65deac902bb"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "637b13e89f48b91d",
"hash_cont_tokens": "7fe519011d639dc8"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "d79c0f32320628be",
"hash_cont_tokens": "77ba99656e04ddd0"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "0eb947a1d4a571d3",
"hash_cont_tokens": "bc8f34ada52ca31e"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "144b6f5b0ee2c132",
"hash_cont_tokens": "d4b66c0f10b911b8"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "5742978f580307e8",
"hash_cont_tokens": "f7ea9e092aff54a4"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "695ba87b872556f3",
"hash_cont_tokens": "9e305ec3d994de5c"
},
"truncated": 0,
"non_truncated": 163,
"padded": 648,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "76e0f333901e7a92",
"hash_cont_tokens": "85f6ff4f34ded537"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "cc757739b8954b9b",
"hash_cont_tokens": "1f24f5bf907f5f28"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "4f2988aaab0a7e07",
"hash_cont_tokens": "37062ffd1e129b49"
},
"truncated": 0,
"non_truncated": 234,
"padded": 932,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "9f045a466a2fa245",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "8abb58943afb732c",
"hash_cont_tokens": "64725e71e0bff006"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "034c4d68002216dc",
"hash_cont_tokens": "d73b7e792a1de62d"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1368,
"non_padded": 16,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "1cf1ba5ed283c147",
"hash_cont_tokens": "291bc548e95ea24c"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "4858f94c557e069a",
"hash_cont_tokens": "4159368fbefa62ba"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "bc1a814aa2ec0d97",
"hash_cont_tokens": "b3758c79335b5e25"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1240,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "d82f5763c7c0aeab",
"hash_cont_tokens": "c7aff90b52b3c210"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "4b98da57fa827859",
"hash_cont_tokens": "8fd4fe19db20b33f"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "a1223f558c8e6a22",
"hash_cont_tokens": "70fdfc3a3cdab2b2"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "25f9537254439cce",
"hash_cont_tokens": "2662c15f3eee1572"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "68981a198cecbd8b",
"hash_cont_tokens": "7b998c3f691a5888"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "2b9983200889161c",
"hash_cont_tokens": "9884d7f2589a4eec"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "6b5b6702fe4cedad",
"hash_cont_tokens": "87576f25f4731ef0"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "1cf3f4312411d9f5",
"hash_cont_tokens": "bba9af89c33fad2f"
},
"truncated": 0,
"non_truncated": 201,
"padded": 776,
"non_padded": 28,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "623fded50b4331b3",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "cf094bede564ab54",
"hash_cont_tokens": "16a5fb37a6047671"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "678d2329b9939306",
"hash_cont_tokens": "65fd69dde784be8d"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "373018952b5d17a6",
"hash_cont_tokens": "48248f31331ca20c"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "b119b996a57751b5",
"hash_cont_tokens": "5ffbcf0b87304360"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "821f45bcb336938c",
"hash_cont_tokens": "3f75cab3e2e46402"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "6925ffa4c3f930fe",
"hash_cont_tokens": "2b89653b20cd6c4a"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113301,
"non_padded": 1571,
"num_truncated_few_shots": 0
} |
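
Each `acc_stderr` in the result blocks above can be recomputed from the reported `acc` and the task's example count. Below is a minimal sketch in Python, assuming the harness reports the finite-sample binomial standard error sqrt(p(1-p)/(n-1)); the helper name `acc_stderr` and the use of the summary block's `non_truncated` count as n are illustrative assumptions, not part of the evaluation output:

```python
import math

# Minimal sketch: recompute a reported "acc_stderr" from its "acc" value.
# Assumption: the harness uses the finite-sample binomial standard error
# sqrt(p * (1 - p) / (n - 1)), with n taken from the task's "non_truncated"
# example count in the summary block (e.g. 100 for abstract_algebra).
def acc_stderr(p: float, n: int) -> float:
    return math.sqrt(p * (1.0 - p) / (n - 1))

# Cross-check against the abstract_algebra entry above: acc = 0.39, n = 100.
recomputed = acc_stderr(0.39, 100)
assert abs(recomputed - 0.04902071300001974) < 1e-9
print(f"{recomputed:.17f}")  # ~0.04902071300001974
```

Run against the abstract_algebra row above (acc = 0.39 over 100 examples), this reproduces the reported stderr of 0.04902071300001974, which also explains why tasks sharing the same acc and example count (e.g. the 100-example MMLU subsets) report identical stderr values.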
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 3587148.223616212,
"end_time": 3593933.508071978,
"total_evaluation_time_secondes": "6785.28445576597",
"model_name": "saltlux/luxia-21.4b-alignment-v1.0",
"model_sha": "ba3403eaafc6d1f6e3a73245314ee96025c08d96",
"model_dtype": "torch.bfloat16",
"model_size": "47.9 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7627986348122867,
"acc_stderr": 0.012430399829260851,
"acc_norm": 0.7747440273037542,
"acc_norm_stderr": 0.012207839995407314
},
"harness|hellaswag|10": {
"acc": 0.8125871340370444,
"acc_stderr": 0.0038944505016930363,
"acc_norm": 0.9188408683529178,
"acc_norm_stderr": 0.0027252124485788636
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6518518518518519,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.6518518518518519,
"acc_norm_stderr": 0.041153246103369526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7763157894736842,
"acc_stderr": 0.03391160934343604,
"acc_norm": 0.7763157894736842,
"acc_norm_stderr": 0.03391160934343604
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.73,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.73,
"acc_norm_stderr": 0.04461960433384741
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7358490566037735,
"acc_stderr": 0.027134291628741713,
"acc_norm": 0.7358490566037735,
"acc_norm_stderr": 0.027134291628741713
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8472222222222222,
"acc_stderr": 0.030085743248565666,
"acc_norm": 0.8472222222222222,
"acc_norm_stderr": 0.030085743248565666
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.44,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.44,
"acc_norm_stderr": 0.049888765156985884
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.03599586301247077,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.03599586301247077
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.43137254901960786,
"acc_stderr": 0.04928099597287534,
"acc_norm": 0.43137254901960786,
"acc_norm_stderr": 0.04928099597287534
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.81,
"acc_stderr": 0.039427724440366234,
"acc_norm": 0.81,
"acc_norm_stderr": 0.039427724440366234
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.6808510638297872,
"acc_stderr": 0.030472973363380042,
"acc_norm": 0.6808510638297872,
"acc_norm_stderr": 0.030472973363380042
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5614035087719298,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.5614035087719298,
"acc_norm_stderr": 0.04668000738510455
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.6137931034482759,
"acc_stderr": 0.04057324734419036,
"acc_norm": 0.6137931034482759,
"acc_norm_stderr": 0.04057324734419036
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.5211640211640212,
"acc_stderr": 0.025728230952130726,
"acc_norm": 0.5211640211640212,
"acc_norm_stderr": 0.025728230952130726
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.04463112720677172,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.04463112720677172
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8354838709677419,
"acc_stderr": 0.02109084774593932,
"acc_norm": 0.8354838709677419,
"acc_norm_stderr": 0.02109084774593932
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6059113300492611,
"acc_stderr": 0.034381579670365446,
"acc_norm": 0.6059113300492611,
"acc_norm_stderr": 0.034381579670365446
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.73,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.73,
"acc_norm_stderr": 0.0446196043338474
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8242424242424242,
"acc_stderr": 0.02972094300622445,
"acc_norm": 0.8242424242424242,
"acc_norm_stderr": 0.02972094300622445
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8434343434343434,
"acc_stderr": 0.025890520358141454,
"acc_norm": 0.8434343434343434,
"acc_norm_stderr": 0.025890520358141454
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.02293514405391943,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.02293514405391943
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.7,
"acc_stderr": 0.023234581088428494,
"acc_norm": 0.7,
"acc_norm_stderr": 0.023234581088428494
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.36666666666666664,
"acc_stderr": 0.029381620726465076,
"acc_norm": 0.36666666666666664,
"acc_norm_stderr": 0.029381620726465076
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7605042016806722,
"acc_stderr": 0.027722065493361262,
"acc_norm": 0.7605042016806722,
"acc_norm_stderr": 0.027722065493361262
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.4370860927152318,
"acc_stderr": 0.04050035722230636,
"acc_norm": 0.4370860927152318,
"acc_norm_stderr": 0.04050035722230636
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8587155963302753,
"acc_stderr": 0.014933868987028072,
"acc_norm": 0.8587155963302753,
"acc_norm_stderr": 0.014933868987028072
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5972222222222222,
"acc_stderr": 0.03344887382997865,
"acc_norm": 0.5972222222222222,
"acc_norm_stderr": 0.03344887382997865
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8676470588235294,
"acc_stderr": 0.02378429752091885,
"acc_norm": 0.8676470588235294,
"acc_norm_stderr": 0.02378429752091885
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8481012658227848,
"acc_stderr": 0.023363878096632443,
"acc_norm": 0.8481012658227848,
"acc_norm_stderr": 0.023363878096632443
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7354260089686099,
"acc_stderr": 0.029605103217038325,
"acc_norm": 0.7354260089686099,
"acc_norm_stderr": 0.029605103217038325
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6717557251908397,
"acc_stderr": 0.04118438565806298,
"acc_norm": 0.6717557251908397,
"acc_norm_stderr": 0.04118438565806298
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8429752066115702,
"acc_stderr": 0.03321244842547128,
"acc_norm": 0.8429752066115702,
"acc_norm_stderr": 0.03321244842547128
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.040191074725573483,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.040191074725573483
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.754601226993865,
"acc_stderr": 0.03380939813943354,
"acc_norm": 0.754601226993865,
"acc_norm_stderr": 0.03380939813943354
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4732142857142857,
"acc_stderr": 0.047389751192741546,
"acc_norm": 0.4732142857142857,
"acc_norm_stderr": 0.047389751192741546
},
"harness|hendrycksTest-management|5": {
"acc": 0.8446601941747572,
"acc_stderr": 0.03586594738573974,
"acc_norm": 0.8446601941747572,
"acc_norm_stderr": 0.03586594738573974
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8974358974358975,
"acc_stderr": 0.019875655027867447,
"acc_norm": 0.8974358974358975,
"acc_norm_stderr": 0.019875655027867447
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8109833971902938,
"acc_stderr": 0.014000791294407,
"acc_norm": 0.8109833971902938,
"acc_norm_stderr": 0.014000791294407
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7138728323699421,
"acc_stderr": 0.02433214677913413,
"acc_norm": 0.7138728323699421,
"acc_norm_stderr": 0.02433214677913413
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4659217877094972,
"acc_stderr": 0.016683615837486863,
"acc_norm": 0.4659217877094972,
"acc_norm_stderr": 0.016683615837486863
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7810457516339869,
"acc_stderr": 0.02367908986180772,
"acc_norm": 0.7810457516339869,
"acc_norm_stderr": 0.02367908986180772
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7363344051446945,
"acc_stderr": 0.02502553850053234,
"acc_norm": 0.7363344051446945,
"acc_norm_stderr": 0.02502553850053234
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7839506172839507,
"acc_stderr": 0.02289916291844579,
"acc_norm": 0.7839506172839507,
"acc_norm_stderr": 0.02289916291844579
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5531914893617021,
"acc_stderr": 0.029658235097666907,
"acc_norm": 0.5531914893617021,
"acc_norm_stderr": 0.029658235097666907
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.48370273794002605,
"acc_stderr": 0.012763450734699817,
"acc_norm": 0.48370273794002605,
"acc_norm_stderr": 0.012763450734699817
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6838235294117647,
"acc_stderr": 0.02824568739146293,
"acc_norm": 0.6838235294117647,
"acc_norm_stderr": 0.02824568739146293
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6813725490196079,
"acc_stderr": 0.01885008469646872,
"acc_norm": 0.6813725490196079,
"acc_norm_stderr": 0.01885008469646872
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302505,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302505
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7510204081632653,
"acc_stderr": 0.027682979522960238,
"acc_norm": 0.7510204081632653,
"acc_norm_stderr": 0.027682979522960238
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8308457711442786,
"acc_stderr": 0.02650859065623327,
"acc_norm": 0.8308457711442786,
"acc_norm_stderr": 0.02650859065623327
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.035887028128263686,
"acc_norm": 0.85,
"acc_norm_stderr": 0.035887028128263686
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.038695433234721015,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.038695433234721015
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8187134502923976,
"acc_stderr": 0.029547741687640038,
"acc_norm": 0.8187134502923976,
"acc_norm_stderr": 0.029547741687640038
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6523867809057528,
"mc1_stderr": 0.01667076918889731,
"mc2": 0.791656253485744,
"mc2_stderr": 0.01329262162821789
},
"harness|winogrande|5": {
"acc": 0.8745067087608525,
"acc_stderr": 0.009310542237486182
},
"harness|gsm8k|5": {
"acc": 0.6239575435936315,
"acc_stderr": 0.013342532064849767
},
"all": {
"acc": 0.6866913238774542,
"acc_stderr": 0.03138668671631704,
"acc_norm": 0.6865746717114842,
"acc_norm_stderr": 0.03204310199162772,
"mc1": 0.6523867809057528,
"mc1_stderr": 0.01667076918889731,
"mc2": 0.791656253485744,
"mc2_stderr": 0.01329262162821789
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "61360774732b48c5",
"hash_cont_tokens": "2c7776913d1cd316"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4664,
"non_padded": 23,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "dfafe55b3ee2e03f",
"hash_cont_tokens": "47ccd02cc15ea606"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40007,
"non_padded": 161,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "02f647d1e7128cbe",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "94b062ef497b6da1",
"hash_cont_tokens": "3448d00acc7a11c6"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "09648295653479cc",
"hash_cont_tokens": "9b5285416fa903e2"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "d1b7b508398e832b",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "36d0486204081242",
"hash_cont_tokens": "0382995cfcc24e3e"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1056,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "abb1a1e52668c5d2",
"hash_cont_tokens": "09ef20d27e0286fe"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "61e2034b4c6e4654",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "82070fd3e9c11558",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "cddd953fdc669e9c",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "b0c7c0bb0f29fd40",
"hash_cont_tokens": "2115091b39764e96"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "a110634091f861c3",
"hash_cont_tokens": "253e8f65a34d2f2b"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "b6add64b6e73a687",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "bc29b24a542d16e5",
"hash_cont_tokens": "863770146d3e3341"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "90bc926de760a574",
"hash_cont_tokens": "547784fe0135a15c"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "f950537458fba1cd",
"hash_cont_tokens": "545e7978a9a2e921"
},
"truncated": 0,
"non_truncated": 145,
"padded": 576,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "e449f8e0bbbffd70",
"hash_cont_tokens": "6220dafecd3e71a1"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "d6375de8b6e2852d",
"hash_cont_tokens": "9e1c83b748056f05"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "58fc4ea768c14475",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "ddd3edff37564eb2",
"hash_cont_tokens": "c0ee938431d4cce1"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "4afbcd19f348405b",
"hash_cont_tokens": "2fd86b22bfa1c8cb"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "6915f846cbbc1376",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "8c3bf3dd4b29788b",
"hash_cont_tokens": "8d52dfdbe7373dec"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "0833402461c795d7",
"hash_cont_tokens": "7daa2bbedae272e1"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "e655d173f257ba00",
"hash_cont_tokens": "530e7985f90589ad"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "ef9a976db5ae26bf",
"hash_cont_tokens": "8abfdac40b0aa157"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "47168004b37b8b1b",
"hash_cont_tokens": "0450a3d8e715e926"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "cc31b962af40a3a9",
"hash_cont_tokens": "3e477b8a15ec619c"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "e25d3a4bd30d4b9b",
"hash_cont_tokens": "f0648b1ae17e3c3f"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "81f1d60b0f28f1dd",
"hash_cont_tokens": "71a621b85c8384ec"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "cd8eff0a7fea8499",
"hash_cont_tokens": "507dec89f16c35ea"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "7e8a7b287cc950ce",
"hash_cont_tokens": "fe66e65deac902bb"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "637b13e89f48b91d",
"hash_cont_tokens": "7fe519011d639dc8"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "d79c0f32320628be",
"hash_cont_tokens": "77ba99656e04ddd0"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "0eb947a1d4a571d3",
"hash_cont_tokens": "bc8f34ada52ca31e"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "144b6f5b0ee2c132",
"hash_cont_tokens": "d4b66c0f10b911b8"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "5742978f580307e8",
"hash_cont_tokens": "f7ea9e092aff54a4"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "695ba87b872556f3",
"hash_cont_tokens": "9e305ec3d994de5c"
},
"truncated": 0,
"non_truncated": 163,
"padded": 648,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "76e0f333901e7a92",
"hash_cont_tokens": "85f6ff4f34ded537"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "cc757739b8954b9b",
"hash_cont_tokens": "1f24f5bf907f5f28"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "4f2988aaab0a7e07",
"hash_cont_tokens": "37062ffd1e129b49"
},
"truncated": 0,
"non_truncated": 234,
"padded": 932,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "9f045a466a2fa245",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "8abb58943afb732c",
"hash_cont_tokens": "64725e71e0bff006"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "034c4d68002216dc",
"hash_cont_tokens": "d73b7e792a1de62d"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1368,
"non_padded": 16,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "1cf1ba5ed283c147",
"hash_cont_tokens": "291bc548e95ea24c"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "4858f94c557e069a",
"hash_cont_tokens": "4159368fbefa62ba"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "bc1a814aa2ec0d97",
"hash_cont_tokens": "b3758c79335b5e25"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1240,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "d82f5763c7c0aeab",
"hash_cont_tokens": "c7aff90b52b3c210"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "4b98da57fa827859",
"hash_cont_tokens": "8fd4fe19db20b33f"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "a1223f558c8e6a22",
"hash_cont_tokens": "70fdfc3a3cdab2b2"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "25f9537254439cce",
"hash_cont_tokens": "2662c15f3eee1572"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "68981a198cecbd8b",
"hash_cont_tokens": "7b998c3f691a5888"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "2b9983200889161c",
"hash_cont_tokens": "9884d7f2589a4eec"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "6b5b6702fe4cedad",
"hash_cont_tokens": "87576f25f4731ef0"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "1cf3f4312411d9f5",
"hash_cont_tokens": "bba9af89c33fad2f"
},
"truncated": 0,
"non_truncated": 201,
"padded": 776,
"non_padded": 28,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "623fded50b4331b3",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "cf094bede564ab54",
"hash_cont_tokens": "16a5fb37a6047671"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "678d2329b9939306",
"hash_cont_tokens": "65fd69dde784be8d"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "373018952b5d17a6",
"hash_cont_tokens": "48248f31331ca20c"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "b119b996a57751b5",
"hash_cont_tokens": "5ffbcf0b87304360"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "821f45bcb336938c",
"hash_cont_tokens": "601b1975a93a3b3d"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "6925ffa4c3f930fe",
"hash_cont_tokens": "c88ce3b216a99d12"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113301,
"non_padded": 1571,
"num_truncated_few_shots": 0
} |
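Each row above follows the same record layout, so the per-task bookkeeping can be checked mechanically. Below is a minimal sketch, assuming the `results` and `summary_tasks` cells of one row have already been extracted from the table and parsed with `json.loads`; the helper names (`mmlu_macro_average`, `check_mmlu_request_counts`) are illustrative, not part of lighteval:

```python
import json

def mmlu_macro_average(results: dict) -> float:
    """Unweighted mean of acc_norm over the harness|hendrycksTest-* subtasks."""
    scores = [
        task["acc_norm"]
        for key, task in results.items()
        if key.startswith("harness|hendrycksTest-")
    ]
    return sum(scores) / len(scores)

def check_mmlu_request_counts(summary_tasks: dict) -> None:
    """MMLU subtasks are 4-way multiple choice, so every document yields
    four scored continuations: padded + non_padded == 4 * documents."""
    for name, task in summary_tasks.items():
        if not name.startswith("harness|hendrycksTest-"):
            continue
        requests = task["padded"] + task["non_padded"]
        documents = task["truncated"] + task["non_truncated"]
        assert requests == 4 * documents, f"{name}: {requests} != 4*{documents}"

# Example with one subtask taken verbatim from the record above:
summary_tasks = json.loads("""{
  "harness|hendrycksTest-moral_disputes|5": {
    "hashes": {"hash_examples": "3171c13ba3c594c4"},
    "truncated": 0, "non_truncated": 346,
    "padded": 1368, "non_padded": 16,
    "effective_few_shots": 5, "num_truncated_few_shots": 0
  }
}""")
check_mmlu_request_counts(summary_tasks)  # passes: 1368 + 16 == 4 * 346
```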
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 3530386.216944582,
"end_time": 3537182.786109433,
"total_evaluation_time_secondes": "6796.569164851215",
"model_name": "saltlux/luxia-21.4b-alignment-v1.0",
"model_sha": "ba3403eaafc6d1f6e3a73245314ee96025c08d96",
"model_dtype": "torch.bfloat16",
"model_size": "47.9 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7627986348122867,
"acc_stderr": 0.012430399829260851,
"acc_norm": 0.7747440273037542,
"acc_norm_stderr": 0.012207839995407314
},
"harness|hellaswag|10": {
"acc": 0.8125871340370444,
"acc_stderr": 0.0038944505016930363,
"acc_norm": 0.9188408683529178,
"acc_norm_stderr": 0.0027252124485788636
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6518518518518519,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.6518518518518519,
"acc_norm_stderr": 0.041153246103369526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7763157894736842,
"acc_stderr": 0.03391160934343604,
"acc_norm": 0.7763157894736842,
"acc_norm_stderr": 0.03391160934343604
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.73,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.73,
"acc_norm_stderr": 0.04461960433384741
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7358490566037735,
"acc_stderr": 0.027134291628741713,
"acc_norm": 0.7358490566037735,
"acc_norm_stderr": 0.027134291628741713
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8472222222222222,
"acc_stderr": 0.030085743248565666,
"acc_norm": 0.8472222222222222,
"acc_norm_stderr": 0.030085743248565666
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.44,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.44,
"acc_norm_stderr": 0.049888765156985884
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.03599586301247077,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.03599586301247077
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.43137254901960786,
"acc_stderr": 0.04928099597287534,
"acc_norm": 0.43137254901960786,
"acc_norm_stderr": 0.04928099597287534
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.81,
"acc_stderr": 0.039427724440366234,
"acc_norm": 0.81,
"acc_norm_stderr": 0.039427724440366234
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.6808510638297872,
"acc_stderr": 0.030472973363380042,
"acc_norm": 0.6808510638297872,
"acc_norm_stderr": 0.030472973363380042
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5614035087719298,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.5614035087719298,
"acc_norm_stderr": 0.04668000738510455
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.6137931034482759,
"acc_stderr": 0.04057324734419036,
"acc_norm": 0.6137931034482759,
"acc_norm_stderr": 0.04057324734419036
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.5211640211640212,
"acc_stderr": 0.025728230952130726,
"acc_norm": 0.5211640211640212,
"acc_norm_stderr": 0.025728230952130726
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.04463112720677172,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.04463112720677172
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8354838709677419,
"acc_stderr": 0.02109084774593932,
"acc_norm": 0.8354838709677419,
"acc_norm_stderr": 0.02109084774593932
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6059113300492611,
"acc_stderr": 0.034381579670365446,
"acc_norm": 0.6059113300492611,
"acc_norm_stderr": 0.034381579670365446
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.73,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.73,
"acc_norm_stderr": 0.0446196043338474
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8242424242424242,
"acc_stderr": 0.02972094300622445,
"acc_norm": 0.8242424242424242,
"acc_norm_stderr": 0.02972094300622445
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8434343434343434,
"acc_stderr": 0.025890520358141454,
"acc_norm": 0.8434343434343434,
"acc_norm_stderr": 0.025890520358141454
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.02293514405391943,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.02293514405391943
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.7,
"acc_stderr": 0.023234581088428494,
"acc_norm": 0.7,
"acc_norm_stderr": 0.023234581088428494
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.36666666666666664,
"acc_stderr": 0.029381620726465076,
"acc_norm": 0.36666666666666664,
"acc_norm_stderr": 0.029381620726465076
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7605042016806722,
"acc_stderr": 0.027722065493361262,
"acc_norm": 0.7605042016806722,
"acc_norm_stderr": 0.027722065493361262
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.4370860927152318,
"acc_stderr": 0.04050035722230636,
"acc_norm": 0.4370860927152318,
"acc_norm_stderr": 0.04050035722230636
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8587155963302753,
"acc_stderr": 0.014933868987028072,
"acc_norm": 0.8587155963302753,
"acc_norm_stderr": 0.014933868987028072
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5972222222222222,
"acc_stderr": 0.03344887382997865,
"acc_norm": 0.5972222222222222,
"acc_norm_stderr": 0.03344887382997865
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8676470588235294,
"acc_stderr": 0.02378429752091885,
"acc_norm": 0.8676470588235294,
"acc_norm_stderr": 0.02378429752091885
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8481012658227848,
"acc_stderr": 0.023363878096632443,
"acc_norm": 0.8481012658227848,
"acc_norm_stderr": 0.023363878096632443
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7354260089686099,
"acc_stderr": 0.029605103217038325,
"acc_norm": 0.7354260089686099,
"acc_norm_stderr": 0.029605103217038325
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6717557251908397,
"acc_stderr": 0.04118438565806298,
"acc_norm": 0.6717557251908397,
"acc_norm_stderr": 0.04118438565806298
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8429752066115702,
"acc_stderr": 0.03321244842547128,
"acc_norm": 0.8429752066115702,
"acc_norm_stderr": 0.03321244842547128
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.040191074725573483,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.040191074725573483
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.754601226993865,
"acc_stderr": 0.03380939813943354,
"acc_norm": 0.754601226993865,
"acc_norm_stderr": 0.03380939813943354
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4732142857142857,
"acc_stderr": 0.047389751192741546,
"acc_norm": 0.4732142857142857,
"acc_norm_stderr": 0.047389751192741546
},
"harness|hendrycksTest-management|5": {
"acc": 0.8446601941747572,
"acc_stderr": 0.03586594738573974,
"acc_norm": 0.8446601941747572,
"acc_norm_stderr": 0.03586594738573974
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8974358974358975,
"acc_stderr": 0.019875655027867447,
"acc_norm": 0.8974358974358975,
"acc_norm_stderr": 0.019875655027867447
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8109833971902938,
"acc_stderr": 0.014000791294407,
"acc_norm": 0.8109833971902938,
"acc_norm_stderr": 0.014000791294407
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7138728323699421,
"acc_stderr": 0.02433214677913413,
"acc_norm": 0.7138728323699421,
"acc_norm_stderr": 0.02433214677913413
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4659217877094972,
"acc_stderr": 0.016683615837486863,
"acc_norm": 0.4659217877094972,
"acc_norm_stderr": 0.016683615837486863
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7810457516339869,
"acc_stderr": 0.02367908986180772,
"acc_norm": 0.7810457516339869,
"acc_norm_stderr": 0.02367908986180772
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7363344051446945,
"acc_stderr": 0.02502553850053234,
"acc_norm": 0.7363344051446945,
"acc_norm_stderr": 0.02502553850053234
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7839506172839507,
"acc_stderr": 0.02289916291844579,
"acc_norm": 0.7839506172839507,
"acc_norm_stderr": 0.02289916291844579
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5531914893617021,
"acc_stderr": 0.029658235097666907,
"acc_norm": 0.5531914893617021,
"acc_norm_stderr": 0.029658235097666907
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.48370273794002605,
"acc_stderr": 0.012763450734699817,
"acc_norm": 0.48370273794002605,
"acc_norm_stderr": 0.012763450734699817
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6838235294117647,
"acc_stderr": 0.02824568739146293,
"acc_norm": 0.6838235294117647,
"acc_norm_stderr": 0.02824568739146293
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6813725490196079,
"acc_stderr": 0.01885008469646872,
"acc_norm": 0.6813725490196079,
"acc_norm_stderr": 0.01885008469646872
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302505,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302505
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7510204081632653,
"acc_stderr": 0.027682979522960238,
"acc_norm": 0.7510204081632653,
"acc_norm_stderr": 0.027682979522960238
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8308457711442786,
"acc_stderr": 0.02650859065623327,
"acc_norm": 0.8308457711442786,
"acc_norm_stderr": 0.02650859065623327
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.035887028128263686,
"acc_norm": 0.85,
"acc_norm_stderr": 0.035887028128263686
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.038695433234721015,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.038695433234721015
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8187134502923976,
"acc_stderr": 0.029547741687640038,
"acc_norm": 0.8187134502923976,
"acc_norm_stderr": 0.029547741687640038
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6523867809057528,
"mc1_stderr": 0.01667076918889731,
"mc2": 0.791656253485744,
"mc2_stderr": 0.01329262162821789
},
"harness|winogrande|5": {
"acc": 0.8745067087608525,
"acc_stderr": 0.009310542237486182
},
"harness|gsm8k|5": {
"acc": 0.6239575435936315,
"acc_stderr": 0.013342532064849767
},
"all": {
"acc": 0.6866913238774542,
"acc_stderr": 0.03138668671631704,
"acc_norm": 0.6865746717114842,
"acc_norm_stderr": 0.03204310199162772,
"mc1": 0.6523867809057528,
"mc1_stderr": 0.01667076918889731,
"mc2": 0.791656253485744,
"mc2_stderr": 0.01329262162821789
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "61360774732b48c5",
"hash_cont_tokens": "2c7776913d1cd316"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4664,
"non_padded": 23,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "dfafe55b3ee2e03f",
"hash_cont_tokens": "47ccd02cc15ea606"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40007,
"non_padded": 161,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "02f647d1e7128cbe",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "94b062ef497b6da1",
"hash_cont_tokens": "3448d00acc7a11c6"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "09648295653479cc",
"hash_cont_tokens": "9b5285416fa903e2"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "d1b7b508398e832b",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "36d0486204081242",
"hash_cont_tokens": "0382995cfcc24e3e"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1056,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "abb1a1e52668c5d2",
"hash_cont_tokens": "09ef20d27e0286fe"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "61e2034b4c6e4654",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "82070fd3e9c11558",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "cddd953fdc669e9c",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "b0c7c0bb0f29fd40",
"hash_cont_tokens": "2115091b39764e96"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "a110634091f861c3",
"hash_cont_tokens": "253e8f65a34d2f2b"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "b6add64b6e73a687",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "bc29b24a542d16e5",
"hash_cont_tokens": "863770146d3e3341"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "90bc926de760a574",
"hash_cont_tokens": "547784fe0135a15c"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "f950537458fba1cd",
"hash_cont_tokens": "545e7978a9a2e921"
},
"truncated": 0,
"non_truncated": 145,
"padded": 576,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "e449f8e0bbbffd70",
"hash_cont_tokens": "6220dafecd3e71a1"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "d6375de8b6e2852d",
"hash_cont_tokens": "9e1c83b748056f05"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "58fc4ea768c14475",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "ddd3edff37564eb2",
"hash_cont_tokens": "c0ee938431d4cce1"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "4afbcd19f348405b",
"hash_cont_tokens": "2fd86b22bfa1c8cb"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "6915f846cbbc1376",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "8c3bf3dd4b29788b",
"hash_cont_tokens": "8d52dfdbe7373dec"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "0833402461c795d7",
"hash_cont_tokens": "7daa2bbedae272e1"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "e655d173f257ba00",
"hash_cont_tokens": "530e7985f90589ad"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "ef9a976db5ae26bf",
"hash_cont_tokens": "8abfdac40b0aa157"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "47168004b37b8b1b",
"hash_cont_tokens": "0450a3d8e715e926"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "cc31b962af40a3a9",
"hash_cont_tokens": "3e477b8a15ec619c"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "e25d3a4bd30d4b9b",
"hash_cont_tokens": "f0648b1ae17e3c3f"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "81f1d60b0f28f1dd",
"hash_cont_tokens": "71a621b85c8384ec"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "cd8eff0a7fea8499",
"hash_cont_tokens": "507dec89f16c35ea"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "7e8a7b287cc950ce",
"hash_cont_tokens": "fe66e65deac902bb"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "637b13e89f48b91d",
"hash_cont_tokens": "7fe519011d639dc8"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "d79c0f32320628be",
"hash_cont_tokens": "77ba99656e04ddd0"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "0eb947a1d4a571d3",
"hash_cont_tokens": "bc8f34ada52ca31e"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "144b6f5b0ee2c132",
"hash_cont_tokens": "d4b66c0f10b911b8"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "5742978f580307e8",
"hash_cont_tokens": "f7ea9e092aff54a4"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "695ba87b872556f3",
"hash_cont_tokens": "9e305ec3d994de5c"
},
"truncated": 0,
"non_truncated": 163,
"padded": 648,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "76e0f333901e7a92",
"hash_cont_tokens": "85f6ff4f34ded537"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "cc757739b8954b9b",
"hash_cont_tokens": "1f24f5bf907f5f28"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "4f2988aaab0a7e07",
"hash_cont_tokens": "37062ffd1e129b49"
},
"truncated": 0,
"non_truncated": 234,
"padded": 932,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "9f045a466a2fa245",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "8abb58943afb732c",
"hash_cont_tokens": "64725e71e0bff006"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "034c4d68002216dc",
"hash_cont_tokens": "d73b7e792a1de62d"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1368,
"non_padded": 16,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "1cf1ba5ed283c147",
"hash_cont_tokens": "291bc548e95ea24c"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "4858f94c557e069a",
"hash_cont_tokens": "4159368fbefa62ba"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "bc1a814aa2ec0d97",
"hash_cont_tokens": "b3758c79335b5e25"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1240,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "d82f5763c7c0aeab",
"hash_cont_tokens": "c7aff90b52b3c210"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "4b98da57fa827859",
"hash_cont_tokens": "8fd4fe19db20b33f"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "a1223f558c8e6a22",
"hash_cont_tokens": "70fdfc3a3cdab2b2"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "25f9537254439cce",
"hash_cont_tokens": "2662c15f3eee1572"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "68981a198cecbd8b",
"hash_cont_tokens": "7b998c3f691a5888"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "2b9983200889161c",
"hash_cont_tokens": "9884d7f2589a4eec"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "6b5b6702fe4cedad",
"hash_cont_tokens": "87576f25f4731ef0"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "1cf3f4312411d9f5",
"hash_cont_tokens": "bba9af89c33fad2f"
},
"truncated": 0,
"non_truncated": 201,
"padded": 776,
"non_padded": 28,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "623fded50b4331b3",
"hash_cont_tokens": "e58c016de340de83"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "cf094bede564ab54",
"hash_cont_tokens": "16a5fb37a6047671"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "678d2329b9939306",
"hash_cont_tokens": "65fd69dde784be8d"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "373018952b5d17a6",
"hash_cont_tokens": "48248f31331ca20c"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "b119b996a57751b5",
"hash_cont_tokens": "5ffbcf0b87304360"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "821f45bcb336938c",
"hash_cont_tokens": "601b1975a93a3b3d"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "6925ffa4c3f930fe",
"hash_cont_tokens": "c88ce3b216a99d12"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113301,
"non_padded": 1571,
"num_truncated_few_shots": 0
} |
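Each row's `summary_tasks` cell rolls up into the `summary_general` cell that follows it: the `truncated`, `non_truncated`, `padded`, and `non_padded` counts in the aggregate block are plain sums of the per-task counts. A minimal consistency check, assuming one row has been exported to a local `results.json` whose top level carries the same `summary_tasks` and `summary_general` keys (the filename and that top-level layout are assumptions for illustration, not part of this dump):

```python
import json

# Load one leaderboard row exported as JSON (hypothetical filename/layout).
with open("results.json") as f:
    row = json.load(f)

summary_tasks = row["summary_tasks"]      # per-task blocks, e.g. "harness|gsm8k|5"
summary_general = row["summary_general"]  # the aggregate block

# The aggregate counts should be straight sums of the per-task counts.
for field in ("truncated", "non_truncated", "padded", "non_padded"):
    total = sum(task[field] for task in summary_tasks.values())
    assert total == summary_general[field], (field, total, summary_general[field])

print("per-task counts match the aggregate block")
```

For the row above, for instance, the per-task `non_truncated` counts should sum to the aggregate's 28659.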
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 160923.380972968,
"end_time": 167994.769021844,
"total_evaluation_time_secondes": "7071.38804887599",
"model_name": "yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B",
"model_sha": "915651208ea9f40c65a60d1f971a09f9461ee691",
"model_dtype": "torch.bfloat16",
"model_size": "24.49 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7209897610921502,
"acc_stderr": 0.013106784883601334,
"acc_norm": 0.7491467576791809,
"acc_norm_stderr": 0.012668198621315425
},
"harness|hellaswag|10": {
"acc": 0.7189802828121888,
"acc_stderr": 0.004485784468576664,
"acc_norm": 0.8930491933877713,
"acc_norm_stderr": 0.0030841908180933076
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6296296296296297,
"acc_stderr": 0.041716541613545426,
"acc_norm": 0.6296296296296297,
"acc_norm_stderr": 0.041716541613545426
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6907894736842105,
"acc_stderr": 0.037610708698674805,
"acc_norm": 0.6907894736842105,
"acc_norm_stderr": 0.037610708698674805
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695238,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695238
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.720754716981132,
"acc_stderr": 0.027611163402399715,
"acc_norm": 0.720754716981132,
"acc_norm_stderr": 0.027611163402399715
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7638888888888888,
"acc_stderr": 0.03551446610810826,
"acc_norm": 0.7638888888888888,
"acc_norm_stderr": 0.03551446610810826
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.03599586301247077,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.03599586301247077
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.049406356306056595,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.049406356306056595
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768078
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5787234042553191,
"acc_stderr": 0.03227834510146268,
"acc_norm": 0.5787234042553191,
"acc_norm_stderr": 0.03227834510146268
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4824561403508772,
"acc_stderr": 0.04700708033551038,
"acc_norm": 0.4824561403508772,
"acc_norm_stderr": 0.04700708033551038
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5793103448275863,
"acc_stderr": 0.0411391498118926,
"acc_norm": 0.5793103448275863,
"acc_norm_stderr": 0.0411391498118926
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41798941798941797,
"acc_stderr": 0.02540255550326091,
"acc_norm": 0.41798941798941797,
"acc_norm_stderr": 0.02540255550326091
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.04444444444444449,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.04444444444444449
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7967741935483871,
"acc_stderr": 0.022891687984554963,
"acc_norm": 0.7967741935483871,
"acc_norm_stderr": 0.022891687984554963
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4876847290640394,
"acc_stderr": 0.035169204442208966,
"acc_norm": 0.4876847290640394,
"acc_norm_stderr": 0.035169204442208966
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.73,
"acc_stderr": 0.04461960433384739,
"acc_norm": 0.73,
"acc_norm_stderr": 0.04461960433384739
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7727272727272727,
"acc_stderr": 0.02985751567338641,
"acc_norm": 0.7727272727272727,
"acc_norm_stderr": 0.02985751567338641
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8911917098445595,
"acc_stderr": 0.022473253332768766,
"acc_norm": 0.8911917098445595,
"acc_norm_stderr": 0.022473253332768766
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.676923076923077,
"acc_stderr": 0.02371088850197057,
"acc_norm": 0.676923076923077,
"acc_norm_stderr": 0.02371088850197057
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.028897748741131147,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.028897748741131147
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6680672268907563,
"acc_stderr": 0.03058869701378364,
"acc_norm": 0.6680672268907563,
"acc_norm_stderr": 0.03058869701378364
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.038615575462551684,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.038615575462551684
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8477064220183487,
"acc_stderr": 0.015405084393157074,
"acc_norm": 0.8477064220183487,
"acc_norm_stderr": 0.015405084393157074
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5046296296296297,
"acc_stderr": 0.03409825519163572,
"acc_norm": 0.5046296296296297,
"acc_norm_stderr": 0.03409825519163572
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8284313725490197,
"acc_stderr": 0.026460569561240644,
"acc_norm": 0.8284313725490197,
"acc_norm_stderr": 0.026460569561240644
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7932489451476793,
"acc_stderr": 0.0263616516683891,
"acc_norm": 0.7932489451476793,
"acc_norm_stderr": 0.0263616516683891
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6860986547085202,
"acc_stderr": 0.031146796482972465,
"acc_norm": 0.6860986547085202,
"acc_norm_stderr": 0.031146796482972465
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8015267175572519,
"acc_stderr": 0.03498149385462472,
"acc_norm": 0.8015267175572519,
"acc_norm_stderr": 0.03498149385462472
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7768595041322314,
"acc_stderr": 0.03800754475228732,
"acc_norm": 0.7768595041322314,
"acc_norm_stderr": 0.03800754475228732
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7592592592592593,
"acc_stderr": 0.04133119440243839,
"acc_norm": 0.7592592592592593,
"acc_norm_stderr": 0.04133119440243839
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7791411042944786,
"acc_stderr": 0.03259177392742178,
"acc_norm": 0.7791411042944786,
"acc_norm_stderr": 0.03259177392742178
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.04697113923010212,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.04697113923010212
},
"harness|hendrycksTest-management|5": {
"acc": 0.7669902912621359,
"acc_stderr": 0.04185832598928315,
"acc_norm": 0.7669902912621359,
"acc_norm_stderr": 0.04185832598928315
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8846153846153846,
"acc_stderr": 0.02093019318517933,
"acc_norm": 0.8846153846153846,
"acc_norm_stderr": 0.02093019318517933
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768079
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8275862068965517,
"acc_stderr": 0.013507943909371802,
"acc_norm": 0.8275862068965517,
"acc_norm_stderr": 0.013507943909371802
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7254335260115607,
"acc_stderr": 0.02402774515526502,
"acc_norm": 0.7254335260115607,
"acc_norm_stderr": 0.02402774515526502
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4491620111731844,
"acc_stderr": 0.01663583834163192,
"acc_norm": 0.4491620111731844,
"acc_norm_stderr": 0.01663583834163192
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7287581699346405,
"acc_stderr": 0.025457756696667874,
"acc_norm": 0.7287581699346405,
"acc_norm_stderr": 0.025457756696667874
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7170418006430869,
"acc_stderr": 0.025583062489984813,
"acc_norm": 0.7170418006430869,
"acc_norm_stderr": 0.025583062489984813
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7345679012345679,
"acc_stderr": 0.024569223600460845,
"acc_norm": 0.7345679012345679,
"acc_norm_stderr": 0.024569223600460845
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.48936170212765956,
"acc_stderr": 0.02982074719142248,
"acc_norm": 0.48936170212765956,
"acc_norm_stderr": 0.02982074719142248
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.46870925684485004,
"acc_stderr": 0.012745204626083136,
"acc_norm": 0.46870925684485004,
"acc_norm_stderr": 0.012745204626083136
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6727941176470589,
"acc_stderr": 0.028501452860396553,
"acc_norm": 0.6727941176470589,
"acc_norm_stderr": 0.028501452860396553
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.673202614379085,
"acc_stderr": 0.0189754279205072,
"acc_norm": 0.673202614379085,
"acc_norm_stderr": 0.0189754279205072
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7306122448979592,
"acc_stderr": 0.02840125202902294,
"acc_norm": 0.7306122448979592,
"acc_norm_stderr": 0.02840125202902294
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8308457711442786,
"acc_stderr": 0.02650859065623327,
"acc_norm": 0.8308457711442786,
"acc_norm_stderr": 0.02650859065623327
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.03588702812826371,
"acc_norm": 0.85,
"acc_norm_stderr": 0.03588702812826371
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5481927710843374,
"acc_stderr": 0.03874371556587953,
"acc_norm": 0.5481927710843374,
"acc_norm_stderr": 0.03874371556587953
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8362573099415205,
"acc_stderr": 0.028380919596145866,
"acc_norm": 0.8362573099415205,
"acc_norm_stderr": 0.028380919596145866
},
"harness|truthfulqa:mc|0": {
"mc1": 0.627906976744186,
"mc1_stderr": 0.01692109011881403,
"mc2": 0.780160272588061,
"mc2_stderr": 0.013871089730066658
},
"harness|winogrande|5": {
"acc": 0.8823993685872139,
"acc_stderr": 0.009053584685573185
},
"harness|gsm8k|5": {
"acc": 0.6952236542835482,
"acc_stderr": 0.012679297549515437
},
"all": {
"acc": 0.6537295532650617,
"acc_stderr": 0.0320842334613103,
"acc_norm": 0.6525780615840782,
"acc_norm_stderr": 0.03277229116935712,
"mc1": 0.627906976744186,
"mc1_stderr": 0.01692109011881403,
"mc2": 0.780160272588061,
"mc2_stderr": 0.013871089730066658
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "9bcd0d1d37471713",
"hash_cont_tokens": "289aa98c400841d8"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4670,
"non_padded": 17,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "80b8c6d79740318e",
"hash_cont_tokens": "ac460260c3e6efc9"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40101,
"non_padded": 67,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "b813d36287c6556c",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "09dc2380497f7a47",
"hash_cont_tokens": "a52a4f60d98cbe5c"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "68ca3220b0fdd1f3",
"hash_cont_tokens": "10f7d8eeba97841d"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "bd14ef1320de241e",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "d96186ab98017c43",
"hash_cont_tokens": "edef9975ba9165b5"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "424136b34e95b200",
"hash_cont_tokens": "0aa103ec6602280b"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "8dd8b80e336bbe54",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "145d4cef8ca2261d",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "561995d32d2b25c4",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "6a258a9d4418599c",
"hash_cont_tokens": "1979021dbc698754"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "fa5e0d5b5f97b66a",
"hash_cont_tokens": "7cf7fe2bab00acbd"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "07d27397edfae492",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "da5e6c3c8eb17da6",
"hash_cont_tokens": "903f64eed2b0d217"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "f6ba8e358bdb523e",
"hash_cont_tokens": "721ae6c5302c4bf2"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "b2459da4c5ca8590",
"hash_cont_tokens": "15a738960ed3e587"
},
"truncated": 0,
"non_truncated": 145,
"padded": 575,
"non_padded": 5,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "0b969d9ad706a13a",
"hash_cont_tokens": "c96470462fc71683"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "02bc3eb5f90da86e",
"hash_cont_tokens": "0e1ce025c9d6ee7e"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "3d5106918bcbeb43",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "7b089392db2dabbd",
"hash_cont_tokens": "e34d57f7d3c4ca16"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "ba90b2ffed1c067d",
"hash_cont_tokens": "e8482d44df4b3740"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "60eeec309ef0717f",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "5e5e8bf3808e0ead",
"hash_cont_tokens": "d63e679a49418339"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "4da9b741d4e7ea78",
"hash_cont_tokens": "d78483e286d06f1a"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "acb4bc872ac86ed7",
"hash_cont_tokens": "691cdff71ff5fe57"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "840fc6403eb69ab0",
"hash_cont_tokens": "d5ad4c5bdca967ad"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "3629a7f2cd17faeb",
"hash_cont_tokens": "8f631ca5687dd0d4"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "6846f684260e3997",
"hash_cont_tokens": "7321048a28451473"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "85aee25d6bdad94a",
"hash_cont_tokens": "bb137581f269861c"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "290b66d6d666a35f",
"hash_cont_tokens": "b455cab2675bd863"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "a77a7668b437bc82",
"hash_cont_tokens": "1b3196fec7e58037"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "63548c7fa9ba7a78",
"hash_cont_tokens": "a331dedc2aa01b3e"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "83c5da18bfa50812",
"hash_cont_tokens": "d0fbe030b8c8c2bf"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "bebbd11f22006685",
"hash_cont_tokens": "1dd29c3755494850"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "7b85ee9b8ee54f4f",
"hash_cont_tokens": "c85573f663c10691"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "7bfc55ab7065943e",
"hash_cont_tokens": "d263804ba918154f"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "69573f1675e053c6",
"hash_cont_tokens": "581986691a84ece8"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "552324ef20094bdc",
"hash_cont_tokens": "55a858b28bbda458"
},
"truncated": 0,
"non_truncated": 163,
"padded": 652,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "96449357a7318905",
"hash_cont_tokens": "e99d3d3efd4ac7a3"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "3b849249168e3b88",
"hash_cont_tokens": "13d9dc56bca34726"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "af0e186f2756b70d",
"hash_cont_tokens": "2700ea26933916a2"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "9f6a6de16509b6d9",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "9194406d589f7c10",
"hash_cont_tokens": "7bf4341c79587250"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "769486efc74d9f8e",
"hash_cont_tokens": "38a48e9de6976f00"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "a90fd4dd90959dad",
"hash_cont_tokens": "761c4dc187689d89"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "1a3b843e66efd29b",
"hash_cont_tokens": "65005bd7d6f6012a"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "09820001a3d00013",
"hash_cont_tokens": "0b47934fb6314dec"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "7c4ec364ce2768c7",
"hash_cont_tokens": "3f20acd855ee0a29"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "ced0534574d0ae3f",
"hash_cont_tokens": "8f122ba881355d4b"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "bcbdbbde22ec73e3",
"hash_cont_tokens": "90d5df417c4d3fd3"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "c54d753563114d45",
"hash_cont_tokens": "4a2d2988884f7f70"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "b75dc55c0e32fa52",
"hash_cont_tokens": "e0a952cb8a9c81de"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "5ccdc8ec8db99622",
"hash_cont_tokens": "1fa77a8dff3922b8"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "ca8497342e5b1d57",
"hash_cont_tokens": "81fc9cb3cbdd52db"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "069c76424fbd3dab",
"hash_cont_tokens": "2a0493252ed2cf43"
},
"truncated": 0,
"non_truncated": 201,
"padded": 804,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "a7e393a626169576",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "bf99dc973e3a650d",
"hash_cont_tokens": "5ab892d003b00c98"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "1761cfaf21797065",
"hash_cont_tokens": "15a5e5dbdfbb8568"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "298b43914bbdf4ca",
"hash_cont_tokens": "5a8d4bb398b1c3c0"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "31aa3477d959f771",
"hash_cont_tokens": "618558fb93c0f288"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "6af0ae8cfe684f50",
"hash_cont_tokens": "6479c5b1ee1a1244"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "9c04e828ae29cacc",
"hash_cont_tokens": "6f55e1d4b6362a63"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113460,
"non_padded": 1412,
"num_truncated_few_shots": 0
} |
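The `all` entry in each `results` cell aggregates the per-task scores, but the exact aggregation rule is not stated in the dump; a plausible reading is that `all.acc` is the unweighted mean of every task's `acc`, with TruthfulQA dropping out since it reports `mc1`/`mc2` instead. A rough re-derivation under that assumption, taking an already-loaded `results` dict as input (the function name is hypothetical):

```python
from statistics import mean

def macro_average_acc(results: dict) -> float:
    """Unweighted mean of per-task 'acc', skipping the precomputed 'all'
    bucket and any task (e.g. truthfulqa:mc) that lacks an 'acc' metric."""
    accs = [
        metrics["acc"]
        for task, metrics in results.items()
        if task != "all" and "acc" in metrics
    ]
    return mean(accs)
```

Comparing the result against a row's reported `all.acc` is a quick sanity check; a small discrepancy would suggest the leaderboard applies a different weighting than the unweighted mean assumed here.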
{
"lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 3879365.556456375,
"end_time": 3887398.310261111,
"total_evaluation_time_secondes": "8032.753804735839",
"model_name": "zhengr/MixTAO-7Bx2-MoE-v8.1",
"model_sha": "2d8cff968dbfb31e0c1ccc42053ccc4d2698a390",
"model_dtype": "torch.bfloat16",
"model_size": "24.49 GB"
} | {
"harness|arc:challenge|25": {
"acc": 0.7167235494880546,
"acc_stderr": 0.013167478735134575,
"acc_norm": 0.7380546075085325,
"acc_norm_stderr": 0.012849054826858107
},
"harness|hellaswag|10": {
"acc": 0.7191794463254332,
"acc_stderr": 0.004484815647064647,
"acc_norm": 0.8921529575781717,
"acc_norm_stderr": 0.003095531986802188
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6222222222222222,
"acc_stderr": 0.04188307537595853,
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.04188307537595853
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6973684210526315,
"acc_stderr": 0.03738520676119669,
"acc_norm": 0.6973684210526315,
"acc_norm_stderr": 0.03738520676119669
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695238,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695238
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7018867924528301,
"acc_stderr": 0.028152837942493857,
"acc_norm": 0.7018867924528301,
"acc_norm_stderr": 0.028152837942493857
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.03476590104304134,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.03476590104304134
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956911
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.56,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.56,
"acc_norm_stderr": 0.049888765156985884
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6763005780346821,
"acc_stderr": 0.0356760379963917,
"acc_norm": 0.6763005780346821,
"acc_norm_stderr": 0.0356760379963917
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.049406356306056595,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.049406356306056595
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768078
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5787234042553191,
"acc_stderr": 0.03227834510146268,
"acc_norm": 0.5787234042553191,
"acc_norm_stderr": 0.03227834510146268
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5724137931034483,
"acc_stderr": 0.04122737111370332,
"acc_norm": 0.5724137931034483,
"acc_norm_stderr": 0.04122737111370332
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4312169312169312,
"acc_stderr": 0.025506481698138215,
"acc_norm": 0.4312169312169312,
"acc_norm_stderr": 0.025506481698138215
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.04444444444444449,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.04444444444444449
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7806451612903226,
"acc_stderr": 0.023540799358723295,
"acc_norm": 0.7806451612903226,
"acc_norm_stderr": 0.023540799358723295
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5024630541871922,
"acc_stderr": 0.035179450386910616,
"acc_norm": 0.5024630541871922,
"acc_norm_stderr": 0.035179450386910616
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542127
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7757575757575758,
"acc_stderr": 0.03256866661681102,
"acc_norm": 0.7757575757575758,
"acc_norm_stderr": 0.03256866661681102
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8080808080808081,
"acc_stderr": 0.028057791672989017,
"acc_norm": 0.8080808080808081,
"acc_norm_stderr": 0.028057791672989017
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.022935144053919436,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.022935144053919436
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6743589743589744,
"acc_stderr": 0.02375966576741229,
"acc_norm": 0.6743589743589744,
"acc_norm_stderr": 0.02375966576741229
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.337037037037037,
"acc_stderr": 0.02882088466625326,
"acc_norm": 0.337037037037037,
"acc_norm_stderr": 0.02882088466625326
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6722689075630253,
"acc_stderr": 0.03048991141767323,
"acc_norm": 0.6722689075630253,
"acc_norm_stderr": 0.03048991141767323
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.37748344370860926,
"acc_stderr": 0.0395802723112157,
"acc_norm": 0.37748344370860926,
"acc_norm_stderr": 0.0395802723112157
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8366972477064221,
"acc_stderr": 0.015848255806501562,
"acc_norm": 0.8366972477064221,
"acc_norm_stderr": 0.015848255806501562
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.03407632093854051,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.03407632093854051
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8382352941176471,
"acc_stderr": 0.025845017986926917,
"acc_norm": 0.8382352941176471,
"acc_norm_stderr": 0.025845017986926917
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7974683544303798,
"acc_stderr": 0.026160568246601436,
"acc_norm": 0.7974683544303798,
"acc_norm_stderr": 0.026160568246601436
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6905829596412556,
"acc_stderr": 0.03102441174057221,
"acc_norm": 0.6905829596412556,
"acc_norm_stderr": 0.03102441174057221
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7786259541984732,
"acc_stderr": 0.03641297081313729,
"acc_norm": 0.7786259541984732,
"acc_norm_stderr": 0.03641297081313729
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7768595041322314,
"acc_stderr": 0.03800754475228732,
"acc_norm": 0.7768595041322314,
"acc_norm_stderr": 0.03800754475228732
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7592592592592593,
"acc_stderr": 0.04133119440243839,
"acc_norm": 0.7592592592592593,
"acc_norm_stderr": 0.04133119440243839
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7791411042944786,
"acc_stderr": 0.03259177392742178,
"acc_norm": 0.7791411042944786,
"acc_norm_stderr": 0.03259177392742178
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4107142857142857,
"acc_stderr": 0.0466951066387519,
"acc_norm": 0.4107142857142857,
"acc_norm_stderr": 0.0466951066387519
},
"harness|hendrycksTest-management|5": {
"acc": 0.7864077669902912,
"acc_stderr": 0.040580420156460344,
"acc_norm": 0.7864077669902912,
"acc_norm_stderr": 0.040580420156460344
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8803418803418803,
"acc_stderr": 0.021262719400406964,
"acc_norm": 0.8803418803418803,
"acc_norm_stderr": 0.021262719400406964
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8301404853128991,
"acc_stderr": 0.013428186370608304,
"acc_norm": 0.8301404853128991,
"acc_norm_stderr": 0.013428186370608304
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7312138728323699,
"acc_stderr": 0.023868003262500104,
"acc_norm": 0.7312138728323699,
"acc_norm_stderr": 0.023868003262500104
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4547486033519553,
"acc_stderr": 0.016653875777524006,
"acc_norm": 0.4547486033519553,
"acc_norm_stderr": 0.016653875777524006
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7124183006535948,
"acc_stderr": 0.02591780611714716,
"acc_norm": 0.7124183006535948,
"acc_norm_stderr": 0.02591780611714716
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7170418006430869,
"acc_stderr": 0.02558306248998481,
"acc_norm": 0.7170418006430869,
"acc_norm_stderr": 0.02558306248998481
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7376543209876543,
"acc_stderr": 0.024477222856135114,
"acc_norm": 0.7376543209876543,
"acc_norm_stderr": 0.024477222856135114
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5141843971631206,
"acc_stderr": 0.02981549448368206,
"acc_norm": 0.5141843971631206,
"acc_norm_stderr": 0.02981549448368206
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4765319426336376,
"acc_stderr": 0.012756161942523367,
"acc_norm": 0.4765319426336376,
"acc_norm_stderr": 0.012756161942523367
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.02841820861940676,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.02841820861940676
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6830065359477124,
"acc_stderr": 0.018824219512706207,
"acc_norm": 0.6830065359477124,
"acc_norm_stderr": 0.018824219512706207
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7387755102040816,
"acc_stderr": 0.028123429335142783,
"acc_norm": 0.7387755102040816,
"acc_norm_stderr": 0.028123429335142783
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8308457711442786,
"acc_stderr": 0.02650859065623327,
"acc_norm": 0.8308457711442786,
"acc_norm_stderr": 0.02650859065623327
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.0358870281282637,
"acc_norm": 0.85,
"acc_norm_stderr": 0.0358870281282637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5602409638554217,
"acc_stderr": 0.03864139923699122,
"acc_norm": 0.5602409638554217,
"acc_norm_stderr": 0.03864139923699122
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8362573099415205,
"acc_stderr": 0.028380919596145866,
"acc_norm": 0.8362573099415205,
"acc_norm_stderr": 0.028380919596145866
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6389228886168911,
"mc1_stderr": 0.01681431284483688,
"mc2": 0.7857105477952205,
"mc2_stderr": 0.013572265560720762
},
"harness|winogrande|5": {
"acc": 0.8737174427782163,
"acc_stderr": 0.009335559129908471
},
"harness|gsm8k|5": {
"acc": 0.711144806671721,
"acc_stderr": 0.012484219800126666
},
"all": {
"acc": 0.6561581700189073,
"acc_stderr": 0.0320963615992398,
"acc_norm": 0.6548320456097733,
"acc_norm_stderr": 0.0327856028992382,
"mc1": 0.6389228886168911,
"mc1_stderr": 0.01681431284483688,
"mc2": 0.7857105477952205,
"mc2_stderr": 0.013572265560720762
}
} | {
"all": 0,
"harness|arc:challenge|25": 0,
"harness|gsm8k|5": 0,
"harness|hellaswag|10": 0,
"harness|hendrycksTest-abstract_algebra|5": 1,
"harness|hendrycksTest-anatomy|5": 1,
"harness|hendrycksTest-astronomy|5": 1,
"harness|hendrycksTest-business_ethics|5": 1,
"harness|hendrycksTest-clinical_knowledge|5": 1,
"harness|hendrycksTest-college_biology|5": 1,
"harness|hendrycksTest-college_chemistry|5": 1,
"harness|hendrycksTest-college_computer_science|5": 1,
"harness|hendrycksTest-college_mathematics|5": 1,
"harness|hendrycksTest-college_medicine|5": 1,
"harness|hendrycksTest-college_physics|5": 1,
"harness|hendrycksTest-computer_security|5": 1,
"harness|hendrycksTest-conceptual_physics|5": 1,
"harness|hendrycksTest-econometrics|5": 1,
"harness|hendrycksTest-electrical_engineering|5": 1,
"harness|hendrycksTest-elementary_mathematics|5": 1,
"harness|hendrycksTest-formal_logic|5": 1,
"harness|hendrycksTest-global_facts|5": 1,
"harness|hendrycksTest-high_school_biology|5": 1,
"harness|hendrycksTest-high_school_chemistry|5": 1,
"harness|hendrycksTest-high_school_computer_science|5": 1,
"harness|hendrycksTest-high_school_european_history|5": 1,
"harness|hendrycksTest-high_school_geography|5": 1,
"harness|hendrycksTest-high_school_government_and_politics|5": 1,
"harness|hendrycksTest-high_school_macroeconomics|5": 1,
"harness|hendrycksTest-high_school_mathematics|5": 1,
"harness|hendrycksTest-high_school_microeconomics|5": 1,
"harness|hendrycksTest-high_school_physics|5": 1,
"harness|hendrycksTest-high_school_psychology|5": 1,
"harness|hendrycksTest-high_school_statistics|5": 1,
"harness|hendrycksTest-high_school_us_history|5": 1,
"harness|hendrycksTest-high_school_world_history|5": 1,
"harness|hendrycksTest-human_aging|5": 1,
"harness|hendrycksTest-human_sexuality|5": 1,
"harness|hendrycksTest-international_law|5": 1,
"harness|hendrycksTest-jurisprudence|5": 1,
"harness|hendrycksTest-logical_fallacies|5": 1,
"harness|hendrycksTest-machine_learning|5": 1,
"harness|hendrycksTest-management|5": 1,
"harness|hendrycksTest-marketing|5": 1,
"harness|hendrycksTest-medical_genetics|5": 1,
"harness|hendrycksTest-miscellaneous|5": 1,
"harness|hendrycksTest-moral_disputes|5": 1,
"harness|hendrycksTest-moral_scenarios|5": 1,
"harness|hendrycksTest-nutrition|5": 1,
"harness|hendrycksTest-philosophy|5": 1,
"harness|hendrycksTest-prehistory|5": 1,
"harness|hendrycksTest-professional_accounting|5": 1,
"harness|hendrycksTest-professional_law|5": 1,
"harness|hendrycksTest-professional_medicine|5": 1,
"harness|hendrycksTest-professional_psychology|5": 1,
"harness|hendrycksTest-public_relations|5": 1,
"harness|hendrycksTest-security_studies|5": 1,
"harness|hendrycksTest-sociology|5": 1,
"harness|hendrycksTest-us_foreign_policy|5": 1,
"harness|hendrycksTest-virology|5": 1,
"harness|hendrycksTest-world_religions|5": 1,
"harness|truthfulqa:mc|0": 1,
"harness|winogrande|5": 0
} | {
"harness|arc:challenge": "LM Harness task",
"harness|gsm8k": "LM Harness task",
"harness|hellaswag": "LM Harness task",
"harness|hendrycksTest-abstract_algebra": "LM Harness task",
"harness|hendrycksTest-anatomy": "LM Harness task",
"harness|hendrycksTest-astronomy": "LM Harness task",
"harness|hendrycksTest-business_ethics": "LM Harness task",
"harness|hendrycksTest-clinical_knowledge": "LM Harness task",
"harness|hendrycksTest-college_biology": "LM Harness task",
"harness|hendrycksTest-college_chemistry": "LM Harness task",
"harness|hendrycksTest-college_computer_science": "LM Harness task",
"harness|hendrycksTest-college_mathematics": "LM Harness task",
"harness|hendrycksTest-college_medicine": "LM Harness task",
"harness|hendrycksTest-college_physics": "LM Harness task",
"harness|hendrycksTest-computer_security": "LM Harness task",
"harness|hendrycksTest-conceptual_physics": "LM Harness task",
"harness|hendrycksTest-econometrics": "LM Harness task",
"harness|hendrycksTest-electrical_engineering": "LM Harness task",
"harness|hendrycksTest-elementary_mathematics": "LM Harness task",
"harness|hendrycksTest-formal_logic": "LM Harness task",
"harness|hendrycksTest-global_facts": "LM Harness task",
"harness|hendrycksTest-high_school_biology": "LM Harness task",
"harness|hendrycksTest-high_school_chemistry": "LM Harness task",
"harness|hendrycksTest-high_school_computer_science": "LM Harness task",
"harness|hendrycksTest-high_school_european_history": "LM Harness task",
"harness|hendrycksTest-high_school_geography": "LM Harness task",
"harness|hendrycksTest-high_school_government_and_politics": "LM Harness task",
"harness|hendrycksTest-high_school_macroeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_mathematics": "LM Harness task",
"harness|hendrycksTest-high_school_microeconomics": "LM Harness task",
"harness|hendrycksTest-high_school_physics": "LM Harness task",
"harness|hendrycksTest-high_school_psychology": "LM Harness task",
"harness|hendrycksTest-high_school_statistics": "LM Harness task",
"harness|hendrycksTest-high_school_us_history": "LM Harness task",
"harness|hendrycksTest-high_school_world_history": "LM Harness task",
"harness|hendrycksTest-human_aging": "LM Harness task",
"harness|hendrycksTest-human_sexuality": "LM Harness task",
"harness|hendrycksTest-international_law": "LM Harness task",
"harness|hendrycksTest-jurisprudence": "LM Harness task",
"harness|hendrycksTest-logical_fallacies": "LM Harness task",
"harness|hendrycksTest-machine_learning": "LM Harness task",
"harness|hendrycksTest-management": "LM Harness task",
"harness|hendrycksTest-marketing": "LM Harness task",
"harness|hendrycksTest-medical_genetics": "LM Harness task",
"harness|hendrycksTest-miscellaneous": "LM Harness task",
"harness|hendrycksTest-moral_disputes": "LM Harness task",
"harness|hendrycksTest-moral_scenarios": "LM Harness task",
"harness|hendrycksTest-nutrition": "LM Harness task",
"harness|hendrycksTest-philosophy": "LM Harness task",
"harness|hendrycksTest-prehistory": "LM Harness task",
"harness|hendrycksTest-professional_accounting": "LM Harness task",
"harness|hendrycksTest-professional_law": "LM Harness task",
"harness|hendrycksTest-professional_medicine": "LM Harness task",
"harness|hendrycksTest-professional_psychology": "LM Harness task",
"harness|hendrycksTest-public_relations": "LM Harness task",
"harness|hendrycksTest-security_studies": "LM Harness task",
"harness|hendrycksTest-sociology": "LM Harness task",
"harness|hendrycksTest-us_foreign_policy": "LM Harness task",
"harness|hendrycksTest-virology": "LM Harness task",
"harness|hendrycksTest-world_religions": "LM Harness task",
"harness|truthfulqa:mc": "LM Harness task",
"harness|winogrande": "LM Harness task"
} | {
"harness|arc:challenge|25": {
"hashes": {
"hash_examples": "17b0cae357c0259e",
"hash_full_prompts": "045cbb916e5145c6",
"hash_input_tokens": "9bcd0d1d37471713",
"hash_cont_tokens": "289aa98c400841d8"
},
"truncated": 0,
"non_truncated": 1172,
"padded": 4670,
"non_padded": 17,
"effective_few_shots": 25,
"num_truncated_few_shots": 0
},
"harness|hellaswag|10": {
"hashes": {
"hash_examples": "e1768ecb99d7ecf0",
"hash_full_prompts": "0b4c16983130f84f",
"hash_input_tokens": "80b8c6d79740318e",
"hash_cont_tokens": "ac460260c3e6efc9"
},
"truncated": 0,
"non_truncated": 10042,
"padded": 40101,
"non_padded": 67,
"effective_few_shots": 10,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-abstract_algebra|5": {
"hashes": {
"hash_examples": "280f9f325b40559a",
"hash_full_prompts": "2f776a367d23aea2",
"hash_input_tokens": "b813d36287c6556c",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-anatomy|5": {
"hashes": {
"hash_examples": "2f83a4f1cab4ba18",
"hash_full_prompts": "516f74bef25df620",
"hash_input_tokens": "09dc2380497f7a47",
"hash_cont_tokens": "a52a4f60d98cbe5c"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-astronomy|5": {
"hashes": {
"hash_examples": "7d587b908da4d762",
"hash_full_prompts": "faf4e80f65de93ca",
"hash_input_tokens": "68ca3220b0fdd1f3",
"hash_cont_tokens": "10f7d8eeba97841d"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-business_ethics|5": {
"hashes": {
"hash_examples": "33e51740670de686",
"hash_full_prompts": "db01c3ef8e1479d4",
"hash_input_tokens": "bd14ef1320de241e",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-clinical_knowledge|5": {
"hashes": {
"hash_examples": "f3366dbe7eefffa4",
"hash_full_prompts": "49654f71d94b65c3",
"hash_input_tokens": "d96186ab98017c43",
"hash_cont_tokens": "edef9975ba9165b5"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_biology|5": {
"hashes": {
"hash_examples": "ca2b6753a0193e7f",
"hash_full_prompts": "2b460b75f1fdfefd",
"hash_input_tokens": "424136b34e95b200",
"hash_cont_tokens": "0aa103ec6602280b"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_chemistry|5": {
"hashes": {
"hash_examples": "22ff85f1d34f42d1",
"hash_full_prompts": "242c9be6da583e95",
"hash_input_tokens": "8dd8b80e336bbe54",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_computer_science|5": {
"hashes": {
"hash_examples": "30318289d717a5cf",
"hash_full_prompts": "ed2bdb4e87c4b371",
"hash_input_tokens": "145d4cef8ca2261d",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_mathematics|5": {
"hashes": {
"hash_examples": "4944d1f0b6b5d911",
"hash_full_prompts": "770bc4281c973190",
"hash_input_tokens": "561995d32d2b25c4",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_medicine|5": {
"hashes": {
"hash_examples": "dd69cc33381275af",
"hash_full_prompts": "ad2a53e5250ab46e",
"hash_input_tokens": "6a258a9d4418599c",
"hash_cont_tokens": "1979021dbc698754"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-college_physics|5": {
"hashes": {
"hash_examples": "875dd26d22655b0d",
"hash_full_prompts": "833a0d7b55aed500",
"hash_input_tokens": "fa5e0d5b5f97b66a",
"hash_cont_tokens": "7cf7fe2bab00acbd"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-computer_security|5": {
"hashes": {
"hash_examples": "006451eedc0ededb",
"hash_full_prompts": "94034c97e85d8f46",
"hash_input_tokens": "07d27397edfae492",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-conceptual_physics|5": {
"hashes": {
"hash_examples": "8874ece872d2ca4c",
"hash_full_prompts": "e40d15a34640d6fa",
"hash_input_tokens": "da5e6c3c8eb17da6",
"hash_cont_tokens": "903f64eed2b0d217"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-econometrics|5": {
"hashes": {
"hash_examples": "64d3623b0bfaa43f",
"hash_full_prompts": "612f340fae41338d",
"hash_input_tokens": "f6ba8e358bdb523e",
"hash_cont_tokens": "721ae6c5302c4bf2"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-electrical_engineering|5": {
"hashes": {
"hash_examples": "e98f51780c674d7e",
"hash_full_prompts": "10275b312d812ae6",
"hash_input_tokens": "b2459da4c5ca8590",
"hash_cont_tokens": "15a738960ed3e587"
},
"truncated": 0,
"non_truncated": 145,
"padded": 575,
"non_padded": 5,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-elementary_mathematics|5": {
"hashes": {
"hash_examples": "fc48208a5ac1c0ce",
"hash_full_prompts": "5ec274c6c82aca23",
"hash_input_tokens": "0b969d9ad706a13a",
"hash_cont_tokens": "c96470462fc71683"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-formal_logic|5": {
"hashes": {
"hash_examples": "5a6525665f63ea72",
"hash_full_prompts": "07b92638c4a6b500",
"hash_input_tokens": "02bc3eb5f90da86e",
"hash_cont_tokens": "0e1ce025c9d6ee7e"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-global_facts|5": {
"hashes": {
"hash_examples": "371d70d743b2b89b",
"hash_full_prompts": "332fdee50a1921b4",
"hash_input_tokens": "3d5106918bcbeb43",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_biology|5": {
"hashes": {
"hash_examples": "a79e1018b1674052",
"hash_full_prompts": "e624e26ede922561",
"hash_input_tokens": "7b089392db2dabbd",
"hash_cont_tokens": "e34d57f7d3c4ca16"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_chemistry|5": {
"hashes": {
"hash_examples": "44bfc25c389f0e03",
"hash_full_prompts": "0e3e5f5d9246482a",
"hash_input_tokens": "ba90b2ffed1c067d",
"hash_cont_tokens": "e8482d44df4b3740"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_computer_science|5": {
"hashes": {
"hash_examples": "8b8cdb1084f24169",
"hash_full_prompts": "c00487e67c1813cc",
"hash_input_tokens": "60eeec309ef0717f",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_european_history|5": {
"hashes": {
"hash_examples": "11cd32d0ef440171",
"hash_full_prompts": "318f4513c537c6bf",
"hash_input_tokens": "5e5e8bf3808e0ead",
"hash_cont_tokens": "d63e679a49418339"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_geography|5": {
"hashes": {
"hash_examples": "b60019b9e80b642f",
"hash_full_prompts": "ee5789fcc1a81b1e",
"hash_input_tokens": "4da9b741d4e7ea78",
"hash_cont_tokens": "d78483e286d06f1a"
},
"truncated": 0,
"non_truncated": 198,
"padded": 792,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "d221ec983d143dc3",
"hash_full_prompts": "ac42d888e1ce1155",
"hash_input_tokens": "acb4bc872ac86ed7",
"hash_cont_tokens": "691cdff71ff5fe57"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "59c2915cacfd3fbb",
"hash_full_prompts": "c6bd9d25158abd0e",
"hash_input_tokens": "840fc6403eb69ab0",
"hash_cont_tokens": "d5ad4c5bdca967ad"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1560,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_mathematics|5": {
"hashes": {
"hash_examples": "1f8ac897608de342",
"hash_full_prompts": "5d88f41fc2d643a8",
"hash_input_tokens": "3629a7f2cd17faeb",
"hash_cont_tokens": "8f631ca5687dd0d4"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1080,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"hashes": {
"hash_examples": "ead6a0f2f6c83370",
"hash_full_prompts": "bfc393381298609e",
"hash_input_tokens": "6846f684260e3997",
"hash_cont_tokens": "7321048a28451473"
},
"truncated": 0,
"non_truncated": 238,
"padded": 952,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_physics|5": {
"hashes": {
"hash_examples": "c3f2025990afec64",
"hash_full_prompts": "fc78b4997e436734",
"hash_input_tokens": "85aee25d6bdad94a",
"hash_cont_tokens": "bb137581f269861c"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_psychology|5": {
"hashes": {
"hash_examples": "21f8aab618f6d636",
"hash_full_prompts": "d5c76aa40b9dbc43",
"hash_input_tokens": "290b66d6d666a35f",
"hash_cont_tokens": "b455cab2675bd863"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2180,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_statistics|5": {
"hashes": {
"hash_examples": "2386a60a11fc5de3",
"hash_full_prompts": "4c5c8be5aafac432",
"hash_input_tokens": "a77a7668b437bc82",
"hash_cont_tokens": "1b3196fec7e58037"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_us_history|5": {
"hashes": {
"hash_examples": "74961543be40f04f",
"hash_full_prompts": "5d5ca4840131ba21",
"hash_input_tokens": "63548c7fa9ba7a78",
"hash_cont_tokens": "a331dedc2aa01b3e"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-high_school_world_history|5": {
"hashes": {
"hash_examples": "2ad2f6b7198b2234",
"hash_full_prompts": "11845057459afd72",
"hash_input_tokens": "83c5da18bfa50812",
"hash_cont_tokens": "d0fbe030b8c8c2bf"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_aging|5": {
"hashes": {
"hash_examples": "1a7199dc733e779b",
"hash_full_prompts": "756b9096b8eaf892",
"hash_input_tokens": "bebbd11f22006685",
"hash_cont_tokens": "1dd29c3755494850"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-human_sexuality|5": {
"hashes": {
"hash_examples": "7acb8fdad97f88a6",
"hash_full_prompts": "731a52ff15b8cfdb",
"hash_input_tokens": "7b85ee9b8ee54f4f",
"hash_cont_tokens": "c85573f663c10691"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-international_law|5": {
"hashes": {
"hash_examples": "1300bfd0dfc59114",
"hash_full_prompts": "db2aefbff5eec996",
"hash_input_tokens": "7bfc55ab7065943e",
"hash_cont_tokens": "d263804ba918154f"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-jurisprudence|5": {
"hashes": {
"hash_examples": "083b1e4904c48dc2",
"hash_full_prompts": "0f89ee3fe03d6a21",
"hash_input_tokens": "69573f1675e053c6",
"hash_cont_tokens": "581986691a84ece8"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-logical_fallacies|5": {
"hashes": {
"hash_examples": "709128f9926a634c",
"hash_full_prompts": "98a04b1f8f841069",
"hash_input_tokens": "552324ef20094bdc",
"hash_cont_tokens": "55a858b28bbda458"
},
"truncated": 0,
"non_truncated": 163,
"padded": 652,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-machine_learning|5": {
"hashes": {
"hash_examples": "88f22a636029ae47",
"hash_full_prompts": "2e1c8d4b1e0cc921",
"hash_input_tokens": "96449357a7318905",
"hash_cont_tokens": "e99d3d3efd4ac7a3"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-management|5": {
"hashes": {
"hash_examples": "8c8a1e07a2151dca",
"hash_full_prompts": "f51611f514b265b0",
"hash_input_tokens": "3b849249168e3b88",
"hash_cont_tokens": "13d9dc56bca34726"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-marketing|5": {
"hashes": {
"hash_examples": "2668953431f91e96",
"hash_full_prompts": "77562bef997c7650",
"hash_input_tokens": "af0e186f2756b70d",
"hash_cont_tokens": "2700ea26933916a2"
},
"truncated": 0,
"non_truncated": 234,
"padded": 936,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-medical_genetics|5": {
"hashes": {
"hash_examples": "9c2dda34a2ea4fd2",
"hash_full_prompts": "202139046daa118f",
"hash_input_tokens": "9f6a6de16509b6d9",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-miscellaneous|5": {
"hashes": {
"hash_examples": "41adb694024809c2",
"hash_full_prompts": "bffec9fc237bcf93",
"hash_input_tokens": "9194406d589f7c10",
"hash_cont_tokens": "7bf4341c79587250"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3132,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_disputes|5": {
"hashes": {
"hash_examples": "3171c13ba3c594c4",
"hash_full_prompts": "170831fc36f1d59e",
"hash_input_tokens": "769486efc74d9f8e",
"hash_cont_tokens": "38a48e9de6976f00"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-moral_scenarios|5": {
"hashes": {
"hash_examples": "9873e077e83e0546",
"hash_full_prompts": "08f4ceba3131a068",
"hash_input_tokens": "a90fd4dd90959dad",
"hash_cont_tokens": "761c4dc187689d89"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-nutrition|5": {
"hashes": {
"hash_examples": "7db1d8142ec14323",
"hash_full_prompts": "4c0e68e3586cb453",
"hash_input_tokens": "1a3b843e66efd29b",
"hash_cont_tokens": "65005bd7d6f6012a"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-philosophy|5": {
"hashes": {
"hash_examples": "9b455b7d72811cc8",
"hash_full_prompts": "e467f822d8a0d3ff",
"hash_input_tokens": "09820001a3d00013",
"hash_cont_tokens": "0b47934fb6314dec"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-prehistory|5": {
"hashes": {
"hash_examples": "8be90d0f538f1560",
"hash_full_prompts": "152187949bcd0921",
"hash_input_tokens": "7c4ec364ce2768c7",
"hash_cont_tokens": "3f20acd855ee0a29"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1296,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_accounting|5": {
"hashes": {
"hash_examples": "8d377597916cd07e",
"hash_full_prompts": "0eb7345d6144ee0d",
"hash_input_tokens": "ced0534574d0ae3f",
"hash_cont_tokens": "8f122ba881355d4b"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1128,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_law|5": {
"hashes": {
"hash_examples": "cd9dbc52b3c932d6",
"hash_full_prompts": "36ac764272bfb182",
"hash_input_tokens": "bcbdbbde22ec73e3",
"hash_cont_tokens": "90d5df417c4d3fd3"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_medicine|5": {
"hashes": {
"hash_examples": "b20e4e816c1e383e",
"hash_full_prompts": "7b8d69ea2acaf2f7",
"hash_input_tokens": "c54d753563114d45",
"hash_cont_tokens": "4a2d2988884f7f70"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-professional_psychology|5": {
"hashes": {
"hash_examples": "d45b73b22f9cc039",
"hash_full_prompts": "fe8937e9ffc99771",
"hash_input_tokens": "b75dc55c0e32fa52",
"hash_cont_tokens": "e0a952cb8a9c81de"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2448,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-public_relations|5": {
"hashes": {
"hash_examples": "0d25072e1761652a",
"hash_full_prompts": "f9adc39cfa9f42ba",
"hash_input_tokens": "5ccdc8ec8db99622",
"hash_cont_tokens": "1fa77a8dff3922b8"
},
"truncated": 0,
"non_truncated": 110,
"padded": 440,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-security_studies|5": {
"hashes": {
"hash_examples": "62bb8197e63d60d4",
"hash_full_prompts": "869c9c3ae196b7c3",
"hash_input_tokens": "ca8497342e5b1d57",
"hash_cont_tokens": "81fc9cb3cbdd52db"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-sociology|5": {
"hashes": {
"hash_examples": "e7959df87dea8672",
"hash_full_prompts": "1a1fc00e17b3a52a",
"hash_input_tokens": "069c76424fbd3dab",
"hash_cont_tokens": "2a0493252ed2cf43"
},
"truncated": 0,
"non_truncated": 201,
"padded": 804,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-us_foreign_policy|5": {
"hashes": {
"hash_examples": "4a56a01ddca44dca",
"hash_full_prompts": "0c7a7081c71c07b6",
"hash_input_tokens": "a7e393a626169576",
"hash_cont_tokens": "17b868b63507f9a3"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-virology|5": {
"hashes": {
"hash_examples": "451cc86a8c4f4fe9",
"hash_full_prompts": "01e95325d8b738e4",
"hash_input_tokens": "bf99dc973e3a650d",
"hash_cont_tokens": "5ab892d003b00c98"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|hendrycksTest-world_religions|5": {
"hashes": {
"hash_examples": "3b29cfaf1a81c379",
"hash_full_prompts": "e0d79a15083dfdff",
"hash_input_tokens": "1761cfaf21797065",
"hash_cont_tokens": "15a5e5dbdfbb8568"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|truthfulqa:mc|0": {
"hashes": {
"hash_examples": "23176c0531c7b867",
"hash_full_prompts": "36a6d90e75d92d4a",
"hash_input_tokens": "298b43914bbdf4ca",
"hash_cont_tokens": "5a8d4bb398b1c3c0"
},
"truncated": 0,
"non_truncated": 817,
"padded": 9996,
"non_padded": 0,
"effective_few_shots": 0,
"num_truncated_few_shots": 0
},
"harness|winogrande|5": {
"hashes": {
"hash_examples": "aada0a176fd81218",
"hash_full_prompts": "c8655cbd12de8409",
"hash_input_tokens": "31aa3477d959f771",
"hash_cont_tokens": "618558fb93c0f288"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2534,
"non_padded": 0,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
},
"harness|gsm8k|5": {
"hashes": {
"hash_examples": "4c0843a5d99bcfdc",
"hash_full_prompts": "41d55e83abc0e02d",
"hash_input_tokens": "6af0ae8cfe684f50",
"hash_cont_tokens": "8869b1a084846632"
},
"truncated": 0,
"non_truncated": 1319,
"padded": 0,
"non_padded": 1319,
"effective_few_shots": 5,
"num_truncated_few_shots": 0
}
} | {
"hashes": {
"hash_examples": "3b7fa57a057f9415",
"hash_full_prompts": "63615fc50fc9417c",
"hash_input_tokens": "9c04e828ae29cacc",
"hash_cont_tokens": "af6584e6aff87cdc"
},
"truncated": 0,
"non_truncated": 28659,
"padded": 113460,
"non_padded": 1412,
"num_truncated_few_shots": 0
} |
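
For reference, the per-task scores in the `results` cell above can be parsed programmatically, and the `"all"` aggregate appears to be an unweighted mean over the tasks that report each metric. The sketch below is a minimal, illustrative example only: the `results.json` path is a placeholder for a local copy of this record, and the averaging convention is an assumption inferred from the numbers shown here, not a documented part of this file.

```python
import json

# Load a local copy of the results record shown above (path is illustrative).
with open("results.json") as f:
    data = json.load(f)

results = data["results"]

# Exclude the precomputed "all" entry and keep only per-task score dicts.
per_task = {name: scores for name, scores in results.items() if name != "all"}

# Assumed convention: the aggregate for a metric is the unweighted mean over
# every task that reports that metric (e.g. "acc" over ARC, HellaSwag, all
# MMLU subtasks, Winogrande, and GSM8K; "mc1"/"mc2" over TruthfulQA only).
for metric in ("acc", "acc_norm", "mc1", "mc2"):
    values = [scores[metric] for scores in per_task.values() if metric in scores]
    if values:
        print(f"mean {metric}: {sum(values) / len(values):.6f}")
```

Running this against the record above should approximately reproduce the values in the `"all"` block (e.g. a mean `acc` near 0.6562), which is a useful sanity check when comparing leaderboard snapshots.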