config_general: dict
results: dict
versions: dict
config_tasks: dict
summary_tasks: dict
summary_general: dict
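Each row below stores these six dictionaries for a single evaluation run. As a rough illustration only (the file name and row layout here are assumptions; the task key is copied from the records below), one row could be read like this:

```python
import json

# Hypothetical file name; assumes one row with the six columns above was saved as JSON.
with open("details_row.json") as f:
    row = json.load(f)

# Task keys follow the pattern "harness|<task>|<num_fewshot>" seen in the rows below.
arc = row["results"]["harness|arc:challenge|25"]
print(arc["acc_norm"], arc["acc_norm_stderr"])
```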
{ "lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null, "job_id": "", "start_time": 145877.357026936, "end_time": 167502.554312204, "total_evaluation_time_secondes": "21625.19728526802", "model_name": "JaeyeonKang/CCK_Asura_v1", "model_sha": "7dd3ddea090bd63f3143e70d7d6237cc40c046e4", "model_dtype": "torch.float16", "model_size": "129.73 GB" }
{ "harness|arc:challenge|25": { "acc": 0.7013651877133106, "acc_stderr": 0.013374078615068749, "acc_norm": 0.7389078498293515, "acc_norm_stderr": 0.012835523909473848 }, "harness|hellaswag|10": { "acc": 0.719577773351922, "acc_stderr": 0.004482874732237349, "acc_norm": 0.8906592312288388, "acc_norm_stderr": 0.003114285077228029 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7111111111111111, "acc_stderr": 0.03915450630414251, "acc_norm": 0.7111111111111111, "acc_norm_stderr": 0.03915450630414251 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8552631578947368, "acc_stderr": 0.0286319518459304, "acc_norm": 0.8552631578947368, "acc_norm_stderr": 0.0286319518459304 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.8, "acc_stderr": 0.04020151261036844, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036844 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8075471698113208, "acc_stderr": 0.024262979839372267, "acc_norm": 0.8075471698113208, "acc_norm_stderr": 0.024262979839372267 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8888888888888888, "acc_stderr": 0.026280550932848087, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.026280550932848087 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7514450867052023, "acc_stderr": 0.03295304696818318, "acc_norm": 0.7514450867052023, "acc_norm_stderr": 0.03295304696818318 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4803921568627451, "acc_stderr": 0.04971358884367406, "acc_norm": 0.4803921568627451, "acc_norm_stderr": 0.04971358884367406 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7404255319148936, "acc_stderr": 0.02865917937429232, "acc_norm": 0.7404255319148936, "acc_norm_stderr": 0.02865917937429232 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5964912280701754, "acc_stderr": 0.04615186962583707, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.04615186962583707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7241379310344828, "acc_stderr": 0.03724563619774632, "acc_norm": 0.7241379310344828, "acc_norm_stderr": 0.03724563619774632 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5343915343915344, "acc_stderr": 0.02569032176249385, "acc_norm": 0.5343915343915344, "acc_norm_stderr": 0.02569032176249385 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5714285714285714, "acc_stderr": 0.04426266681379909, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.04426266681379909 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.864516129032258, "acc_stderr": 0.019469334586486933, "acc_norm": 0.864516129032258, 
"acc_norm_stderr": 0.019469334586486933 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6206896551724138, "acc_stderr": 0.034139638059062345, "acc_norm": 0.6206896551724138, "acc_norm_stderr": 0.034139638059062345 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.87, "acc_stderr": 0.03379976689896309, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8363636363636363, "acc_stderr": 0.02888787239548795, "acc_norm": 0.8363636363636363, "acc_norm_stderr": 0.02888787239548795 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9040404040404041, "acc_stderr": 0.020984808610047933, "acc_norm": 0.9040404040404041, "acc_norm_stderr": 0.020984808610047933 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9430051813471503, "acc_stderr": 0.016731085293607558, "acc_norm": 0.9430051813471503, "acc_norm_stderr": 0.016731085293607558 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7897435897435897, "acc_stderr": 0.020660597485026945, "acc_norm": 0.7897435897435897, "acc_norm_stderr": 0.020660597485026945 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.030149135601365944, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.030149135601365944 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.865546218487395, "acc_stderr": 0.022159373072744442, "acc_norm": 0.865546218487395, "acc_norm_stderr": 0.022159373072744442 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5099337748344371, "acc_stderr": 0.04081677107248436, "acc_norm": 0.5099337748344371, "acc_norm_stderr": 0.04081677107248436 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9192660550458716, "acc_stderr": 0.011680172292862086, "acc_norm": 0.9192660550458716, "acc_norm_stderr": 0.011680172292862086 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6990740740740741, "acc_stderr": 0.031280390843298804, "acc_norm": 0.6990740740740741, "acc_norm_stderr": 0.031280390843298804 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9313725490196079, "acc_stderr": 0.017744453647073315, "acc_norm": 0.9313725490196079, "acc_norm_stderr": 0.017744453647073315 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9240506329113924, "acc_stderr": 0.0172446332510657, "acc_norm": 0.9240506329113924, "acc_norm_stderr": 0.0172446332510657 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7982062780269058, "acc_stderr": 0.02693611191280227, "acc_norm": 0.7982062780269058, "acc_norm_stderr": 0.02693611191280227 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8778625954198473, "acc_stderr": 0.02871877688934232, "acc_norm": 0.8778625954198473, "acc_norm_stderr": 0.02871877688934232 }, "harness|hendrycksTest-international_law|5": { "acc": 0.9090909090909091, "acc_stderr": 0.026243194054073878, "acc_norm": 0.9090909090909091, "acc_norm_stderr": 0.026243194054073878 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8796296296296297, "acc_stderr": 0.0314570385430625, "acc_norm": 0.8796296296296297, "acc_norm_stderr": 0.0314570385430625 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.030446777687971723, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.030446777687971723 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.6339285714285714, "acc_stderr": 0.0457237235873743, "acc_norm": 0.6339285714285714, 
"acc_norm_stderr": 0.0457237235873743 }, "harness|hendrycksTest-management|5": { "acc": 0.8737864077669902, "acc_stderr": 0.03288180278808629, "acc_norm": 0.8737864077669902, "acc_norm_stderr": 0.03288180278808629 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9273504273504274, "acc_stderr": 0.01700436856813237, "acc_norm": 0.9273504273504274, "acc_norm_stderr": 0.01700436856813237 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8914431673052363, "acc_stderr": 0.011124283175851183, "acc_norm": 0.8914431673052363, "acc_norm_stderr": 0.011124283175851183 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8439306358381503, "acc_stderr": 0.019539014685374036, "acc_norm": 0.8439306358381503, "acc_norm_stderr": 0.019539014685374036 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6636871508379888, "acc_stderr": 0.0158010037291459, "acc_norm": 0.6636871508379888, "acc_norm_stderr": 0.0158010037291459 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8300653594771242, "acc_stderr": 0.02150538312123138, "acc_norm": 0.8300653594771242, "acc_norm_stderr": 0.02150538312123138 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.819935691318328, "acc_stderr": 0.02182342285774494, "acc_norm": 0.819935691318328, "acc_norm_stderr": 0.02182342285774494 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8487654320987654, "acc_stderr": 0.019935086092149886, "acc_norm": 0.8487654320987654, "acc_norm_stderr": 0.019935086092149886 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6134751773049646, "acc_stderr": 0.02904919034254347, "acc_norm": 0.6134751773049646, "acc_norm_stderr": 0.02904919034254347 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.589960886571056, "acc_stderr": 0.012561837621962032, "acc_norm": 0.589960886571056, "acc_norm_stderr": 0.012561837621962032 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8235294117647058, "acc_stderr": 0.023157468308559345, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.023157468308559345 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8218954248366013, "acc_stderr": 0.015478369653108568, "acc_norm": 0.8218954248366013, "acc_norm_stderr": 0.015478369653108568 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.04309118709946458, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.04309118709946458 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8244897959183674, "acc_stderr": 0.02435280072297001, "acc_norm": 0.8244897959183674, "acc_norm_stderr": 0.02435280072297001 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9154228855721394, "acc_stderr": 0.019675343217199173, "acc_norm": 0.9154228855721394, "acc_norm_stderr": 0.019675343217199173 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.95, "acc_stderr": 0.021904291355759057, "acc_norm": 0.95, "acc_norm_stderr": 0.021904291355759057 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.03844453181770917, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.03844453181770917 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015577, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015577 }, "harness|truthfulqa:mc|0": { "mc1": 0.565483476132191, "mc1_stderr": 0.017352738749259564, "mc2": 0.7174856574663107, "mc2_stderr": 
0.014605715133518151 }, "harness|winogrande|5": { "acc": 0.8634569850039463, "acc_stderr": 0.0096502429002916 }, "harness|gsm8k|5": { "acc": 0.6808188021228203, "acc_stderr": 0.012840345676251653 }, "all": { "acc": 0.7535469467828841, "acc_stderr": 0.028473742983492905, "acc_norm": 0.7564527472308834, "acc_norm_stderr": 0.029025433712812198, "mc1": 0.565483476132191, "mc1_stderr": 0.017352738749259564, "mc2": 0.7174856574663107, "mc2_stderr": 0.014605715133518151 } }
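The closing "all" entry looks like an unweighted average of the per-task metrics. A minimal sketch of that aggregation, assuming each metric is simply averaged over the tasks that report it (results is the dict shown above):

```python
from statistics import mean

def aggregate(results: dict) -> dict:
    """Average each metric over the tasks that report it, skipping the "all" entry."""
    per_metric: dict[str, list[float]] = {}
    for task, metrics in results.items():
        if task == "all":
            continue
        for name, value in metrics.items():
            per_metric.setdefault(name, []).append(value)
    return {name: mean(values) for name, values in per_metric.items()}

# Under this assumption, aggregate(row["results"]) should roughly reproduce
# the "all" block shown above.
```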
{ "all": 0, "harness|arc:challenge|25": 0, "harness|gsm8k|5": 0, "harness|hellaswag|10": 0, "harness|hendrycksTest-abstract_algebra|5": 1, "harness|hendrycksTest-anatomy|5": 1, "harness|hendrycksTest-astronomy|5": 1, "harness|hendrycksTest-business_ethics|5": 1, "harness|hendrycksTest-clinical_knowledge|5": 1, "harness|hendrycksTest-college_biology|5": 1, "harness|hendrycksTest-college_chemistry|5": 1, "harness|hendrycksTest-college_computer_science|5": 1, "harness|hendrycksTest-college_mathematics|5": 1, "harness|hendrycksTest-college_medicine|5": 1, "harness|hendrycksTest-college_physics|5": 1, "harness|hendrycksTest-computer_security|5": 1, "harness|hendrycksTest-conceptual_physics|5": 1, "harness|hendrycksTest-econometrics|5": 1, "harness|hendrycksTest-electrical_engineering|5": 1, "harness|hendrycksTest-elementary_mathematics|5": 1, "harness|hendrycksTest-formal_logic|5": 1, "harness|hendrycksTest-global_facts|5": 1, "harness|hendrycksTest-high_school_biology|5": 1, "harness|hendrycksTest-high_school_chemistry|5": 1, "harness|hendrycksTest-high_school_computer_science|5": 1, "harness|hendrycksTest-high_school_european_history|5": 1, "harness|hendrycksTest-high_school_geography|5": 1, "harness|hendrycksTest-high_school_government_and_politics|5": 1, "harness|hendrycksTest-high_school_macroeconomics|5": 1, "harness|hendrycksTest-high_school_mathematics|5": 1, "harness|hendrycksTest-high_school_microeconomics|5": 1, "harness|hendrycksTest-high_school_physics|5": 1, "harness|hendrycksTest-high_school_psychology|5": 1, "harness|hendrycksTest-high_school_statistics|5": 1, "harness|hendrycksTest-high_school_us_history|5": 1, "harness|hendrycksTest-high_school_world_history|5": 1, "harness|hendrycksTest-human_aging|5": 1, "harness|hendrycksTest-human_sexuality|5": 1, "harness|hendrycksTest-international_law|5": 1, "harness|hendrycksTest-jurisprudence|5": 1, "harness|hendrycksTest-logical_fallacies|5": 1, "harness|hendrycksTest-machine_learning|5": 1, "harness|hendrycksTest-management|5": 1, "harness|hendrycksTest-marketing|5": 1, "harness|hendrycksTest-medical_genetics|5": 1, "harness|hendrycksTest-miscellaneous|5": 1, "harness|hendrycksTest-moral_disputes|5": 1, "harness|hendrycksTest-moral_scenarios|5": 1, "harness|hendrycksTest-nutrition|5": 1, "harness|hendrycksTest-philosophy|5": 1, "harness|hendrycksTest-prehistory|5": 1, "harness|hendrycksTest-professional_accounting|5": 1, "harness|hendrycksTest-professional_law|5": 1, "harness|hendrycksTest-professional_medicine|5": 1, "harness|hendrycksTest-professional_psychology|5": 1, "harness|hendrycksTest-public_relations|5": 1, "harness|hendrycksTest-security_studies|5": 1, "harness|hendrycksTest-sociology|5": 1, "harness|hendrycksTest-us_foreign_policy|5": 1, "harness|hendrycksTest-virology|5": 1, "harness|hendrycksTest-world_religions|5": 1, "harness|truthfulqa:mc|0": 1, "harness|winogrande|5": 0 }
{ "harness|arc:challenge": "LM Harness task", "harness|gsm8k": "LM Harness task", "harness|hellaswag": "LM Harness task", "harness|hendrycksTest-abstract_algebra": "LM Harness task", "harness|hendrycksTest-anatomy": "LM Harness task", "harness|hendrycksTest-astronomy": "LM Harness task", "harness|hendrycksTest-business_ethics": "LM Harness task", "harness|hendrycksTest-clinical_knowledge": "LM Harness task", "harness|hendrycksTest-college_biology": "LM Harness task", "harness|hendrycksTest-college_chemistry": "LM Harness task", "harness|hendrycksTest-college_computer_science": "LM Harness task", "harness|hendrycksTest-college_mathematics": "LM Harness task", "harness|hendrycksTest-college_medicine": "LM Harness task", "harness|hendrycksTest-college_physics": "LM Harness task", "harness|hendrycksTest-computer_security": "LM Harness task", "harness|hendrycksTest-conceptual_physics": "LM Harness task", "harness|hendrycksTest-econometrics": "LM Harness task", "harness|hendrycksTest-electrical_engineering": "LM Harness task", "harness|hendrycksTest-elementary_mathematics": "LM Harness task", "harness|hendrycksTest-formal_logic": "LM Harness task", "harness|hendrycksTest-global_facts": "LM Harness task", "harness|hendrycksTest-high_school_biology": "LM Harness task", "harness|hendrycksTest-high_school_chemistry": "LM Harness task", "harness|hendrycksTest-high_school_computer_science": "LM Harness task", "harness|hendrycksTest-high_school_european_history": "LM Harness task", "harness|hendrycksTest-high_school_geography": "LM Harness task", "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task", "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task", "harness|hendrycksTest-high_school_mathematics": "LM Harness task", "harness|hendrycksTest-high_school_microeconomics": "LM Harness task", "harness|hendrycksTest-high_school_physics": "LM Harness task", "harness|hendrycksTest-high_school_psychology": "LM Harness task", "harness|hendrycksTest-high_school_statistics": "LM Harness task", "harness|hendrycksTest-high_school_us_history": "LM Harness task", "harness|hendrycksTest-high_school_world_history": "LM Harness task", "harness|hendrycksTest-human_aging": "LM Harness task", "harness|hendrycksTest-human_sexuality": "LM Harness task", "harness|hendrycksTest-international_law": "LM Harness task", "harness|hendrycksTest-jurisprudence": "LM Harness task", "harness|hendrycksTest-logical_fallacies": "LM Harness task", "harness|hendrycksTest-machine_learning": "LM Harness task", "harness|hendrycksTest-management": "LM Harness task", "harness|hendrycksTest-marketing": "LM Harness task", "harness|hendrycksTest-medical_genetics": "LM Harness task", "harness|hendrycksTest-miscellaneous": "LM Harness task", "harness|hendrycksTest-moral_disputes": "LM Harness task", "harness|hendrycksTest-moral_scenarios": "LM Harness task", "harness|hendrycksTest-nutrition": "LM Harness task", "harness|hendrycksTest-philosophy": "LM Harness task", "harness|hendrycksTest-prehistory": "LM Harness task", "harness|hendrycksTest-professional_accounting": "LM Harness task", "harness|hendrycksTest-professional_law": "LM Harness task", "harness|hendrycksTest-professional_medicine": "LM Harness task", "harness|hendrycksTest-professional_psychology": "LM Harness task", "harness|hendrycksTest-public_relations": "LM Harness task", "harness|hendrycksTest-security_studies": "LM Harness task", "harness|hendrycksTest-sociology": "LM Harness task", "harness|hendrycksTest-us_foreign_policy": "LM Harness 
task", "harness|hendrycksTest-virology": "LM Harness task", "harness|hendrycksTest-world_religions": "LM Harness task", "harness|truthfulqa:mc": "LM Harness task", "harness|winogrande": "LM Harness task" }
{ "harness|arc:challenge|25": { "hashes": { "hash_examples": "17b0cae357c0259e", "hash_full_prompts": "045cbb916e5145c6", "hash_input_tokens": "ca48d52265c0051f", "hash_cont_tokens": "e8abf848493b50f7" }, "truncated": 0, "non_truncated": 1172, "padded": 4687, "non_padded": 0, "effective_few_shots": 25, "num_truncated_few_shots": 0 }, "harness|hellaswag|10": { "hashes": { "hash_examples": "e1768ecb99d7ecf0", "hash_full_prompts": "0b4c16983130f84f", "hash_input_tokens": "4975ded0ed31f702", "hash_cont_tokens": "9fe0a5c42e1532db" }, "truncated": 0, "non_truncated": 10042, "padded": 40019, "non_padded": 149, "effective_few_shots": 10, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-abstract_algebra|5": { "hashes": { "hash_examples": "280f9f325b40559a", "hash_full_prompts": "2f776a367d23aea2", "hash_input_tokens": "8ff523ec326d5d55", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-anatomy|5": { "hashes": { "hash_examples": "2f83a4f1cab4ba18", "hash_full_prompts": "516f74bef25df620", "hash_input_tokens": "742bd6a389a8ef40", "hash_cont_tokens": "f11971a765cb609f" }, "truncated": 0, "non_truncated": 135, "padded": 540, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-astronomy|5": { "hashes": { "hash_examples": "7d587b908da4d762", "hash_full_prompts": "faf4e80f65de93ca", "hash_input_tokens": "aa9743839c83bd9f", "hash_cont_tokens": "440a970fadecdc7b" }, "truncated": 0, "non_truncated": 152, "padded": 608, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-business_ethics|5": { "hashes": { "hash_examples": "33e51740670de686", "hash_full_prompts": "db01c3ef8e1479d4", "hash_input_tokens": "60f6ed52e2a2987a", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-clinical_knowledge|5": { "hashes": { "hash_examples": "f3366dbe7eefffa4", "hash_full_prompts": "49654f71d94b65c3", "hash_input_tokens": "6080d9f3c5930be0", "hash_cont_tokens": "7ecd60c25b9bfe5b" }, "truncated": 0, "non_truncated": 265, "padded": 1060, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_biology|5": { "hashes": { "hash_examples": "ca2b6753a0193e7f", "hash_full_prompts": "2b460b75f1fdfefd", "hash_input_tokens": "873319724ad65589", "hash_cont_tokens": "875cde3af7a0ee14" }, "truncated": 0, "non_truncated": 144, "padded": 564, "non_padded": 12, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_chemistry|5": { "hashes": { "hash_examples": "22ff85f1d34f42d1", "hash_full_prompts": "242c9be6da583e95", "hash_input_tokens": "8366d04d12b154a7", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_computer_science|5": { "hashes": { "hash_examples": "30318289d717a5cf", "hash_full_prompts": "ed2bdb4e87c4b371", "hash_input_tokens": "1724a282fb269fd7", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_mathematics|5": { "hashes": { "hash_examples": "4944d1f0b6b5d911", 
"hash_full_prompts": "770bc4281c973190", "hash_input_tokens": "b7aa815781eae172", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_medicine|5": { "hashes": { "hash_examples": "dd69cc33381275af", "hash_full_prompts": "ad2a53e5250ab46e", "hash_input_tokens": "0003d13e86bc8c1a", "hash_cont_tokens": "702fb6d82ff0d6ac" }, "truncated": 0, "non_truncated": 173, "padded": 692, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_physics|5": { "hashes": { "hash_examples": "875dd26d22655b0d", "hash_full_prompts": "833a0d7b55aed500", "hash_input_tokens": "32b28762dd077c78", "hash_cont_tokens": "f7b8097afc16a47c" }, "truncated": 0, "non_truncated": 102, "padded": 404, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-computer_security|5": { "hashes": { "hash_examples": "006451eedc0ededb", "hash_full_prompts": "94034c97e85d8f46", "hash_input_tokens": "19dd0e1895125d49", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-conceptual_physics|5": { "hashes": { "hash_examples": "8874ece872d2ca4c", "hash_full_prompts": "e40d15a34640d6fa", "hash_input_tokens": "761c7ce187b3338a", "hash_cont_tokens": "aa0e8bc655f2f641" }, "truncated": 0, "non_truncated": 235, "padded": 940, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-econometrics|5": { "hashes": { "hash_examples": "64d3623b0bfaa43f", "hash_full_prompts": "612f340fae41338d", "hash_input_tokens": "dae74024ebc12b2b", "hash_cont_tokens": "b1cc6e7e9fcd3827" }, "truncated": 0, "non_truncated": 114, "padded": 456, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-electrical_engineering|5": { "hashes": { "hash_examples": "e98f51780c674d7e", "hash_full_prompts": "10275b312d812ae6", "hash_input_tokens": "5fa8050688a246ed", "hash_cont_tokens": "2425a3f084a591ef" }, "truncated": 0, "non_truncated": 145, "padded": 580, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-elementary_mathematics|5": { "hashes": { "hash_examples": "fc48208a5ac1c0ce", "hash_full_prompts": "5ec274c6c82aca23", "hash_input_tokens": "2da3f8d7d1515cc6", "hash_cont_tokens": "bd87bf0c060fd925" }, "truncated": 0, "non_truncated": 378, "padded": 1512, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-formal_logic|5": { "hashes": { "hash_examples": "5a6525665f63ea72", "hash_full_prompts": "07b92638c4a6b500", "hash_input_tokens": "907de61bbe46dada", "hash_cont_tokens": "eb8932890e0605db" }, "truncated": 0, "non_truncated": 126, "padded": 504, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-global_facts|5": { "hashes": { "hash_examples": "371d70d743b2b89b", "hash_full_prompts": "332fdee50a1921b4", "hash_input_tokens": "d7549fe9ac133643", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_biology|5": { "hashes": { "hash_examples": "a79e1018b1674052", "hash_full_prompts": "e624e26ede922561", "hash_input_tokens": 
"b449ae8cd622fb96", "hash_cont_tokens": "1ddcb86d28cde266" }, "truncated": 0, "non_truncated": 310, "padded": 1240, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_chemistry|5": { "hashes": { "hash_examples": "44bfc25c389f0e03", "hash_full_prompts": "0e3e5f5d9246482a", "hash_input_tokens": "a447bd1574b5e26c", "hash_cont_tokens": "176c8dcff38c5f8f" }, "truncated": 0, "non_truncated": 203, "padded": 812, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_computer_science|5": { "hashes": { "hash_examples": "8b8cdb1084f24169", "hash_full_prompts": "c00487e67c1813cc", "hash_input_tokens": "56312a0c3d85ae90", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_european_history|5": { "hashes": { "hash_examples": "11cd32d0ef440171", "hash_full_prompts": "318f4513c537c6bf", "hash_input_tokens": "5002f4ac8b1562ca", "hash_cont_tokens": "674fc454bdc5ac93" }, "truncated": 0, "non_truncated": 165, "padded": 656, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_geography|5": { "hashes": { "hash_examples": "b60019b9e80b642f", "hash_full_prompts": "ee5789fcc1a81b1e", "hash_input_tokens": "b4f9efd054b0149d", "hash_cont_tokens": "03a5012b916274ea" }, "truncated": 0, "non_truncated": 198, "padded": 792, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "hashes": { "hash_examples": "d221ec983d143dc3", "hash_full_prompts": "ac42d888e1ce1155", "hash_input_tokens": "6e010d01707b5a01", "hash_cont_tokens": "873d2aab226ba1d8" }, "truncated": 0, "non_truncated": 193, "padded": 772, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "hashes": { "hash_examples": "59c2915cacfd3fbb", "hash_full_prompts": "c6bd9d25158abd0e", "hash_input_tokens": "fc1f6e824ba386d7", "hash_cont_tokens": "c583432ad27fcfe0" }, "truncated": 0, "non_truncated": 390, "padded": 1560, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_mathematics|5": { "hashes": { "hash_examples": "1f8ac897608de342", "hash_full_prompts": "5d88f41fc2d643a8", "hash_input_tokens": "3a485a40c8432ece", "hash_cont_tokens": "d7907b61bcb8c123" }, "truncated": 0, "non_truncated": 270, "padded": 1080, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_microeconomics|5": { "hashes": { "hash_examples": "ead6a0f2f6c83370", "hash_full_prompts": "bfc393381298609e", "hash_input_tokens": "a7dd9ca4bbda3752", "hash_cont_tokens": "f47f041de50333b9" }, "truncated": 0, "non_truncated": 238, "padded": 952, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_physics|5": { "hashes": { "hash_examples": "c3f2025990afec64", "hash_full_prompts": "fc78b4997e436734", "hash_input_tokens": "d7ea631399a73865", "hash_cont_tokens": "0d56317b3e5eedb5" }, "truncated": 0, "non_truncated": 151, "padded": 604, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_psychology|5": { "hashes": { "hash_examples": "21f8aab618f6d636", "hash_full_prompts": "d5c76aa40b9dbc43", 
"hash_input_tokens": "d12816cf88146011", "hash_cont_tokens": "09ba1243e7390c0f" }, "truncated": 0, "non_truncated": 545, "padded": 2180, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_statistics|5": { "hashes": { "hash_examples": "2386a60a11fc5de3", "hash_full_prompts": "4c5c8be5aafac432", "hash_input_tokens": "9763ecaef4814c21", "hash_cont_tokens": "9cc29889c3d3f77d" }, "truncated": 0, "non_truncated": 216, "padded": 864, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_us_history|5": { "hashes": { "hash_examples": "74961543be40f04f", "hash_full_prompts": "5d5ca4840131ba21", "hash_input_tokens": "c639cce12a46ebad", "hash_cont_tokens": "cdd0b3dc06d933e5" }, "truncated": 0, "non_truncated": 204, "padded": 816, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_world_history|5": { "hashes": { "hash_examples": "2ad2f6b7198b2234", "hash_full_prompts": "11845057459afd72", "hash_input_tokens": "b9762065cce6f3a6", "hash_cont_tokens": "e02816433ff28daf" }, "truncated": 0, "non_truncated": 237, "padded": 948, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-human_aging|5": { "hashes": { "hash_examples": "1a7199dc733e779b", "hash_full_prompts": "756b9096b8eaf892", "hash_input_tokens": "84157fee0b6d0f3c", "hash_cont_tokens": "142a4a8a1138a214" }, "truncated": 0, "non_truncated": 223, "padded": 892, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-human_sexuality|5": { "hashes": { "hash_examples": "7acb8fdad97f88a6", "hash_full_prompts": "731a52ff15b8cfdb", "hash_input_tokens": "ade303e1ae3c016f", "hash_cont_tokens": "bc54813e809b796d" }, "truncated": 0, "non_truncated": 131, "padded": 524, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-international_law|5": { "hashes": { "hash_examples": "1300bfd0dfc59114", "hash_full_prompts": "db2aefbff5eec996", "hash_input_tokens": "e5482e1c23c23d35", "hash_cont_tokens": "8ea8c5ff76a15bca" }, "truncated": 0, "non_truncated": 121, "padded": 484, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-jurisprudence|5": { "hashes": { "hash_examples": "083b1e4904c48dc2", "hash_full_prompts": "0f89ee3fe03d6a21", "hash_input_tokens": "4415eeb9bad0507b", "hash_cont_tokens": "e3a8cd951b6e3469" }, "truncated": 0, "non_truncated": 108, "padded": 432, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-logical_fallacies|5": { "hashes": { "hash_examples": "709128f9926a634c", "hash_full_prompts": "98a04b1f8f841069", "hash_input_tokens": "e6b5271422ecbaa8", "hash_cont_tokens": "3e9e0bdc248fd88a" }, "truncated": 0, "non_truncated": 163, "padded": 644, "non_padded": 8, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-machine_learning|5": { "hashes": { "hash_examples": "88f22a636029ae47", "hash_full_prompts": "2e1c8d4b1e0cc921", "hash_input_tokens": "e719cb83196977d8", "hash_cont_tokens": "55b12fb138c6a064" }, "truncated": 0, "non_truncated": 112, "padded": 448, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-management|5": { "hashes": { "hash_examples": "8c8a1e07a2151dca", "hash_full_prompts": "f51611f514b265b0", "hash_input_tokens": "155da0e62b39e804", "hash_cont_tokens": 
"a01d6d39a83c4597" }, "truncated": 0, "non_truncated": 103, "padded": 412, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-marketing|5": { "hashes": { "hash_examples": "2668953431f91e96", "hash_full_prompts": "77562bef997c7650", "hash_input_tokens": "38466c242259e6d3", "hash_cont_tokens": "6aeaed4d823c98aa" }, "truncated": 0, "non_truncated": 234, "padded": 932, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-medical_genetics|5": { "hashes": { "hash_examples": "9c2dda34a2ea4fd2", "hash_full_prompts": "202139046daa118f", "hash_input_tokens": "0dd129e92538a7f6", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-miscellaneous|5": { "hashes": { "hash_examples": "41adb694024809c2", "hash_full_prompts": "bffec9fc237bcf93", "hash_input_tokens": "d108a883fc3e022f", "hash_cont_tokens": "9b0ab02a64603081" }, "truncated": 0, "non_truncated": 783, "padded": 3132, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-moral_disputes|5": { "hashes": { "hash_examples": "3171c13ba3c594c4", "hash_full_prompts": "170831fc36f1d59e", "hash_input_tokens": "0e7b7df82884a2d5", "hash_cont_tokens": "3b8bbe9108e55ce9" }, "truncated": 0, "non_truncated": 346, "padded": 1364, "non_padded": 20, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-moral_scenarios|5": { "hashes": { "hash_examples": "9873e077e83e0546", "hash_full_prompts": "08f4ceba3131a068", "hash_input_tokens": "7c220f5613cd8426", "hash_cont_tokens": "3e9bfc0362e97330" }, "truncated": 0, "non_truncated": 895, "padded": 3580, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-nutrition|5": { "hashes": { "hash_examples": "7db1d8142ec14323", "hash_full_prompts": "4c0e68e3586cb453", "hash_input_tokens": "35de1609a9a763a9", "hash_cont_tokens": "23b2dc6ee2da4cfc" }, "truncated": 0, "non_truncated": 306, "padded": 1224, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-philosophy|5": { "hashes": { "hash_examples": "9b455b7d72811cc8", "hash_full_prompts": "e467f822d8a0d3ff", "hash_input_tokens": "a1dcfa9c80490d06", "hash_cont_tokens": "9f6ff69d23a48783" }, "truncated": 0, "non_truncated": 311, "padded": 1244, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-prehistory|5": { "hashes": { "hash_examples": "8be90d0f538f1560", "hash_full_prompts": "152187949bcd0921", "hash_input_tokens": "a091cf645d2415e0", "hash_cont_tokens": "d6458d743d875837" }, "truncated": 0, "non_truncated": 324, "padded": 1296, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_accounting|5": { "hashes": { "hash_examples": "8d377597916cd07e", "hash_full_prompts": "0eb7345d6144ee0d", "hash_input_tokens": "e9df32a33f85290c", "hash_cont_tokens": "922a195f53a35662" }, "truncated": 0, "non_truncated": 282, "padded": 1128, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_law|5": { "hashes": { "hash_examples": "cd9dbc52b3c932d6", "hash_full_prompts": "36ac764272bfb182", "hash_input_tokens": "c9f7583fff66d361", "hash_cont_tokens": "2e590029ef41fbcd" }, "truncated": 0, "non_truncated": 1534, "padded": 6136, "non_padded": 0, 
"effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_medicine|5": { "hashes": { "hash_examples": "b20e4e816c1e383e", "hash_full_prompts": "7b8d69ea2acaf2f7", "hash_input_tokens": "40a933f829116f8d", "hash_cont_tokens": "7cfee54dbddd5a98" }, "truncated": 0, "non_truncated": 272, "padded": 1088, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_psychology|5": { "hashes": { "hash_examples": "d45b73b22f9cc039", "hash_full_prompts": "fe8937e9ffc99771", "hash_input_tokens": "0f6a92c3a2062b48", "hash_cont_tokens": "a86677b2a45c20e1" }, "truncated": 0, "non_truncated": 612, "padded": 2448, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-public_relations|5": { "hashes": { "hash_examples": "0d25072e1761652a", "hash_full_prompts": "f9adc39cfa9f42ba", "hash_input_tokens": "29a08e9bfbe9b2f0", "hash_cont_tokens": "0d756ccaae031757" }, "truncated": 0, "non_truncated": 110, "padded": 440, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-security_studies|5": { "hashes": { "hash_examples": "62bb8197e63d60d4", "hash_full_prompts": "869c9c3ae196b7c3", "hash_input_tokens": "32a03f1f22a6e103", "hash_cont_tokens": "b2229bc2cfbf594b" }, "truncated": 0, "non_truncated": 245, "padded": 980, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-sociology|5": { "hashes": { "hash_examples": "e7959df87dea8672", "hash_full_prompts": "1a1fc00e17b3a52a", "hash_input_tokens": "1de5c52d2b2831d7", "hash_cont_tokens": "c3a3bdfd177eed5b" }, "truncated": 0, "non_truncated": 201, "padded": 800, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-us_foreign_policy|5": { "hashes": { "hash_examples": "4a56a01ddca44dca", "hash_full_prompts": "0c7a7081c71c07b6", "hash_input_tokens": "add924961f7f4146", "hash_cont_tokens": "50421e30bef398f9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-virology|5": { "hashes": { "hash_examples": "451cc86a8c4f4fe9", "hash_full_prompts": "01e95325d8b738e4", "hash_input_tokens": "e0653601c466b1bc", "hash_cont_tokens": "af8b3658088cb37f" }, "truncated": 0, "non_truncated": 166, "padded": 664, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-world_religions|5": { "hashes": { "hash_examples": "3b29cfaf1a81c379", "hash_full_prompts": "e0d79a15083dfdff", "hash_input_tokens": "ac600d612445156d", "hash_cont_tokens": "060118bef6de4e0a" }, "truncated": 0, "non_truncated": 171, "padded": 684, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|truthfulqa:mc|0": { "hashes": { "hash_examples": "23176c0531c7b867", "hash_full_prompts": "36a6d90e75d92d4a", "hash_input_tokens": "a03ce28b7fd06aa7", "hash_cont_tokens": "f5da56a132aab151" }, "truncated": 0, "non_truncated": 817, "padded": 9996, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "harness|winogrande|5": { "hashes": { "hash_examples": "aada0a176fd81218", "hash_full_prompts": "c8655cbd12de8409", "hash_input_tokens": "72067255e368e24e", "hash_cont_tokens": "f08975ad6f2d5864" }, "truncated": 0, "non_truncated": 1267, "padded": 2534, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|gsm8k|5": { "hashes": { "hash_examples": 
"4c0843a5d99bcfdc", "hash_full_prompts": "41d55e83abc0e02d", "hash_input_tokens": "bda342e47b5099b2", "hash_cont_tokens": "696e9690b146bd30" }, "truncated": 0, "non_truncated": 1319, "padded": 0, "non_padded": 1319, "effective_few_shots": 5, "num_truncated_few_shots": 0 } }
{ "hashes": { "hash_examples": "3b7fa57a057f9415", "hash_full_prompts": "63615fc50fc9417c", "hash_input_tokens": "a8fa53915153e1db", "hash_cont_tokens": "62b683fb5cadf0a1" }, "truncated": 0, "non_truncated": 28659, "padded": 113348, "non_padded": 1524, "num_truncated_few_shots": 0 }
{ "lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null, "job_id": "", "start_time": 3973041.324489038, "end_time": 4009208.078815405, "total_evaluation_time_secondes": "36166.75432636682", "model_name": "abacusai/Smaug-72B-v0.1", "model_sha": "54a8c35600ec5cb30ca2129247854ece23e57f57", "model_dtype": "torch.bfloat16", "model_size": "135.9 GB" }
{ "harness|arc:challenge|25": { "acc": 0.735494880546075, "acc_stderr": 0.012889272949313371, "acc_norm": 0.7602389078498294, "acc_norm_stderr": 0.012476304127453944 }, "harness|hellaswag|10": { "acc": 0.7199761003784106, "acc_stderr": 0.004480929450281562, "acc_norm": 0.8926508663612827, "acc_norm_stderr": 0.0030892396746331585 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7185185185185186, "acc_stderr": 0.038850042458002526, "acc_norm": 0.7185185185185186, "acc_norm_stderr": 0.038850042458002526 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.881578947368421, "acc_stderr": 0.026293995855474928, "acc_norm": 0.881578947368421, "acc_norm_stderr": 0.026293995855474928 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.82, "acc_stderr": 0.038612291966536955, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536955 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8452830188679246, "acc_stderr": 0.022257075558791282, "acc_norm": 0.8452830188679246, "acc_norm_stderr": 0.022257075558791282 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.9305555555555556, "acc_stderr": 0.021257974822832048, "acc_norm": 0.9305555555555556, "acc_norm_stderr": 0.021257974822832048 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145633, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7456647398843931, "acc_stderr": 0.0332055644308557, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.0332055644308557 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5686274509803921, "acc_stderr": 0.04928099597287534, "acc_norm": 0.5686274509803921, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.81, "acc_stderr": 0.03942772444036622, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036622 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7914893617021277, "acc_stderr": 0.026556982117838728, "acc_norm": 0.7914893617021277, "acc_norm_stderr": 0.026556982117838728 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.6140350877192983, "acc_stderr": 0.04579639422070434, "acc_norm": 0.6140350877192983, "acc_norm_stderr": 0.04579639422070434 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7724137931034483, "acc_stderr": 0.03493950380131184, "acc_norm": 0.7724137931034483, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.6904761904761905, "acc_stderr": 0.023809523809523864, "acc_norm": 0.6904761904761905, "acc_norm_stderr": 0.023809523809523864 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5714285714285714, "acc_stderr": 0.04426266681379909, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.04426266681379909 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8838709677419355, "acc_stderr": 0.018225757949432306, "acc_norm": 
0.8838709677419355, "acc_norm_stderr": 0.018225757949432306 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6600985221674877, "acc_stderr": 0.033327690684107895, "acc_norm": 0.6600985221674877, "acc_norm_stderr": 0.033327690684107895 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8545454545454545, "acc_stderr": 0.027530196355066584, "acc_norm": 0.8545454545454545, "acc_norm_stderr": 0.027530196355066584 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9393939393939394, "acc_stderr": 0.016999994927421592, "acc_norm": 0.9393939393939394, "acc_norm_stderr": 0.016999994927421592 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9844559585492227, "acc_stderr": 0.008927492715084315, "acc_norm": 0.9844559585492227, "acc_norm_stderr": 0.008927492715084315 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8076923076923077, "acc_stderr": 0.019982347208637282, "acc_norm": 0.8076923076923077, "acc_norm_stderr": 0.019982347208637282 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.4703703703703704, "acc_stderr": 0.030431963547936584, "acc_norm": 0.4703703703703704, "acc_norm_stderr": 0.030431963547936584 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8445378151260504, "acc_stderr": 0.023536818625398904, "acc_norm": 0.8445378151260504, "acc_norm_stderr": 0.023536818625398904 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5629139072847682, "acc_stderr": 0.040500357222306355, "acc_norm": 0.5629139072847682, "acc_norm_stderr": 0.040500357222306355 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9357798165137615, "acc_stderr": 0.010510494713201403, "acc_norm": 0.9357798165137615, "acc_norm_stderr": 0.010510494713201403 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6805555555555556, "acc_stderr": 0.03179876342176853, "acc_norm": 0.6805555555555556, "acc_norm_stderr": 0.03179876342176853 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9117647058823529, "acc_stderr": 0.019907399791316945, "acc_norm": 0.9117647058823529, "acc_norm_stderr": 0.019907399791316945 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9113924050632911, "acc_stderr": 0.018498315206865384, "acc_norm": 0.9113924050632911, "acc_norm_stderr": 0.018498315206865384 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7982062780269058, "acc_stderr": 0.02693611191280227, "acc_norm": 0.7982062780269058, "acc_norm_stderr": 0.02693611191280227 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8931297709923665, "acc_stderr": 0.027096548624883733, "acc_norm": 0.8931297709923665, "acc_norm_stderr": 0.027096548624883733 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8925619834710744, "acc_stderr": 0.028268812192540616, "acc_norm": 0.8925619834710744, "acc_norm_stderr": 0.028268812192540616 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8611111111111112, "acc_stderr": 0.033432700628696195, "acc_norm": 0.8611111111111112, "acc_norm_stderr": 0.033432700628696195 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8343558282208589, "acc_stderr": 0.029208296231259104, "acc_norm": 0.8343558282208589, "acc_norm_stderr": 0.029208296231259104 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.6160714285714286, "acc_stderr": 0.04616143075028546, 
"acc_norm": 0.6160714285714286, "acc_norm_stderr": 0.04616143075028546 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.0349260647662379, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.0349260647662379 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9401709401709402, "acc_stderr": 0.015537514263253874, "acc_norm": 0.9401709401709402, "acc_norm_stderr": 0.015537514263253874 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.86, "acc_stderr": 0.034873508801977725, "acc_norm": 0.86, "acc_norm_stderr": 0.034873508801977725 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.9169859514687101, "acc_stderr": 0.009866287394639536, "acc_norm": 0.9169859514687101, "acc_norm_stderr": 0.009866287394639536 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8410404624277457, "acc_stderr": 0.019685307033571946, "acc_norm": 0.8410404624277457, "acc_norm_stderr": 0.019685307033571946 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6960893854748603, "acc_stderr": 0.01538284558758452, "acc_norm": 0.6960893854748603, "acc_norm_stderr": 0.01538284558758452 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8496732026143791, "acc_stderr": 0.02046417512433263, "acc_norm": 0.8496732026143791, "acc_norm_stderr": 0.02046417512433263 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.842443729903537, "acc_stderr": 0.020692237273583984, "acc_norm": 0.842443729903537, "acc_norm_stderr": 0.020692237273583984 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8641975308641975, "acc_stderr": 0.019061588181505405, "acc_norm": 0.8641975308641975, "acc_norm_stderr": 0.019061588181505405 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6560283687943262, "acc_stderr": 0.02833801742861133, "acc_norm": 0.6560283687943262, "acc_norm_stderr": 0.02833801742861133 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.6023468057366362, "acc_stderr": 0.012499840347460642, "acc_norm": 0.6023468057366362, "acc_norm_stderr": 0.012499840347460642 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8345588235294118, "acc_stderr": 0.02257177102549473, "acc_norm": 0.8345588235294118, "acc_norm_stderr": 0.02257177102549473 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.815359477124183, "acc_stderr": 0.015697029240757773, "acc_norm": 0.815359477124183, "acc_norm_stderr": 0.015697029240757773 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7454545454545455, "acc_stderr": 0.04172343038705383, "acc_norm": 0.7454545454545455, "acc_norm_stderr": 0.04172343038705383 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8163265306122449, "acc_stderr": 0.024789071332007646, "acc_norm": 0.8163265306122449, "acc_norm_stderr": 0.024789071332007646 }, "harness|hendrycksTest-sociology|5": { "acc": 0.900497512437811, "acc_stderr": 0.021166216304659397, "acc_norm": 0.900497512437811, "acc_norm_stderr": 0.021166216304659397 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.93, "acc_stderr": 0.0256432399976243, "acc_norm": 0.93, "acc_norm_stderr": 0.0256432399976243 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.038444531817709175, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.038444531817709175 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8713450292397661, "acc_stderr": 0.025679342723276894, "acc_norm": 0.8713450292397661, "acc_norm_stderr": 0.025679342723276894 }, "harness|truthfulqa:mc|0": { "mc1": 0.6560587515299877, "mc1_stderr": 0.016629087514276785, "mc2": 
0.7666613083747418, "mc2_stderr": 0.014124410528709273 }, "harness|winogrande|5": { "acc": 0.850828729281768, "acc_stderr": 0.010012598805627305 }, "harness|gsm8k|5": { "acc": 0.7869598180439727, "acc_stderr": 0.01127844785690078 }, "all": { "acc": 0.7716613011645818, "acc_stderr": 0.02801089457302993, "acc_norm": 0.7734062646949216, "acc_norm_stderr": 0.028568963791437117, "mc1": 0.6560587515299877, "mc1_stderr": 0.016629087514276785, "mc2": 0.7666613083747418, "mc2_stderr": 0.014124410528709273 } }
{ "all": 0, "harness|arc:challenge|25": 0, "harness|gsm8k|5": 0, "harness|hellaswag|10": 0, "harness|hendrycksTest-abstract_algebra|5": 1, "harness|hendrycksTest-anatomy|5": 1, "harness|hendrycksTest-astronomy|5": 1, "harness|hendrycksTest-business_ethics|5": 1, "harness|hendrycksTest-clinical_knowledge|5": 1, "harness|hendrycksTest-college_biology|5": 1, "harness|hendrycksTest-college_chemistry|5": 1, "harness|hendrycksTest-college_computer_science|5": 1, "harness|hendrycksTest-college_mathematics|5": 1, "harness|hendrycksTest-college_medicine|5": 1, "harness|hendrycksTest-college_physics|5": 1, "harness|hendrycksTest-computer_security|5": 1, "harness|hendrycksTest-conceptual_physics|5": 1, "harness|hendrycksTest-econometrics|5": 1, "harness|hendrycksTest-electrical_engineering|5": 1, "harness|hendrycksTest-elementary_mathematics|5": 1, "harness|hendrycksTest-formal_logic|5": 1, "harness|hendrycksTest-global_facts|5": 1, "harness|hendrycksTest-high_school_biology|5": 1, "harness|hendrycksTest-high_school_chemistry|5": 1, "harness|hendrycksTest-high_school_computer_science|5": 1, "harness|hendrycksTest-high_school_european_history|5": 1, "harness|hendrycksTest-high_school_geography|5": 1, "harness|hendrycksTest-high_school_government_and_politics|5": 1, "harness|hendrycksTest-high_school_macroeconomics|5": 1, "harness|hendrycksTest-high_school_mathematics|5": 1, "harness|hendrycksTest-high_school_microeconomics|5": 1, "harness|hendrycksTest-high_school_physics|5": 1, "harness|hendrycksTest-high_school_psychology|5": 1, "harness|hendrycksTest-high_school_statistics|5": 1, "harness|hendrycksTest-high_school_us_history|5": 1, "harness|hendrycksTest-high_school_world_history|5": 1, "harness|hendrycksTest-human_aging|5": 1, "harness|hendrycksTest-human_sexuality|5": 1, "harness|hendrycksTest-international_law|5": 1, "harness|hendrycksTest-jurisprudence|5": 1, "harness|hendrycksTest-logical_fallacies|5": 1, "harness|hendrycksTest-machine_learning|5": 1, "harness|hendrycksTest-management|5": 1, "harness|hendrycksTest-marketing|5": 1, "harness|hendrycksTest-medical_genetics|5": 1, "harness|hendrycksTest-miscellaneous|5": 1, "harness|hendrycksTest-moral_disputes|5": 1, "harness|hendrycksTest-moral_scenarios|5": 1, "harness|hendrycksTest-nutrition|5": 1, "harness|hendrycksTest-philosophy|5": 1, "harness|hendrycksTest-prehistory|5": 1, "harness|hendrycksTest-professional_accounting|5": 1, "harness|hendrycksTest-professional_law|5": 1, "harness|hendrycksTest-professional_medicine|5": 1, "harness|hendrycksTest-professional_psychology|5": 1, "harness|hendrycksTest-public_relations|5": 1, "harness|hendrycksTest-security_studies|5": 1, "harness|hendrycksTest-sociology|5": 1, "harness|hendrycksTest-us_foreign_policy|5": 1, "harness|hendrycksTest-virology|5": 1, "harness|hendrycksTest-world_religions|5": 1, "harness|truthfulqa:mc|0": 1, "harness|winogrande|5": 0 }
{ "harness|arc:challenge": "LM Harness task", "harness|gsm8k": "LM Harness task", "harness|hellaswag": "LM Harness task", "harness|hendrycksTest-abstract_algebra": "LM Harness task", "harness|hendrycksTest-anatomy": "LM Harness task", "harness|hendrycksTest-astronomy": "LM Harness task", "harness|hendrycksTest-business_ethics": "LM Harness task", "harness|hendrycksTest-clinical_knowledge": "LM Harness task", "harness|hendrycksTest-college_biology": "LM Harness task", "harness|hendrycksTest-college_chemistry": "LM Harness task", "harness|hendrycksTest-college_computer_science": "LM Harness task", "harness|hendrycksTest-college_mathematics": "LM Harness task", "harness|hendrycksTest-college_medicine": "LM Harness task", "harness|hendrycksTest-college_physics": "LM Harness task", "harness|hendrycksTest-computer_security": "LM Harness task", "harness|hendrycksTest-conceptual_physics": "LM Harness task", "harness|hendrycksTest-econometrics": "LM Harness task", "harness|hendrycksTest-electrical_engineering": "LM Harness task", "harness|hendrycksTest-elementary_mathematics": "LM Harness task", "harness|hendrycksTest-formal_logic": "LM Harness task", "harness|hendrycksTest-global_facts": "LM Harness task", "harness|hendrycksTest-high_school_biology": "LM Harness task", "harness|hendrycksTest-high_school_chemistry": "LM Harness task", "harness|hendrycksTest-high_school_computer_science": "LM Harness task", "harness|hendrycksTest-high_school_european_history": "LM Harness task", "harness|hendrycksTest-high_school_geography": "LM Harness task", "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task", "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task", "harness|hendrycksTest-high_school_mathematics": "LM Harness task", "harness|hendrycksTest-high_school_microeconomics": "LM Harness task", "harness|hendrycksTest-high_school_physics": "LM Harness task", "harness|hendrycksTest-high_school_psychology": "LM Harness task", "harness|hendrycksTest-high_school_statistics": "LM Harness task", "harness|hendrycksTest-high_school_us_history": "LM Harness task", "harness|hendrycksTest-high_school_world_history": "LM Harness task", "harness|hendrycksTest-human_aging": "LM Harness task", "harness|hendrycksTest-human_sexuality": "LM Harness task", "harness|hendrycksTest-international_law": "LM Harness task", "harness|hendrycksTest-jurisprudence": "LM Harness task", "harness|hendrycksTest-logical_fallacies": "LM Harness task", "harness|hendrycksTest-machine_learning": "LM Harness task", "harness|hendrycksTest-management": "LM Harness task", "harness|hendrycksTest-marketing": "LM Harness task", "harness|hendrycksTest-medical_genetics": "LM Harness task", "harness|hendrycksTest-miscellaneous": "LM Harness task", "harness|hendrycksTest-moral_disputes": "LM Harness task", "harness|hendrycksTest-moral_scenarios": "LM Harness task", "harness|hendrycksTest-nutrition": "LM Harness task", "harness|hendrycksTest-philosophy": "LM Harness task", "harness|hendrycksTest-prehistory": "LM Harness task", "harness|hendrycksTest-professional_accounting": "LM Harness task", "harness|hendrycksTest-professional_law": "LM Harness task", "harness|hendrycksTest-professional_medicine": "LM Harness task", "harness|hendrycksTest-professional_psychology": "LM Harness task", "harness|hendrycksTest-public_relations": "LM Harness task", "harness|hendrycksTest-security_studies": "LM Harness task", "harness|hendrycksTest-sociology": "LM Harness task", "harness|hendrycksTest-us_foreign_policy": "LM Harness 
task", "harness|hendrycksTest-virology": "LM Harness task", "harness|hendrycksTest-world_religions": "LM Harness task", "harness|truthfulqa:mc": "LM Harness task", "harness|winogrande": "LM Harness task" }
{ "harness|arc:challenge|25": { "hashes": { "hash_examples": "17b0cae357c0259e", "hash_full_prompts": "045cbb916e5145c6", "hash_input_tokens": "a86de36cca2a19b9", "hash_cont_tokens": "402adfa0ed1abfe3" }, "truncated": 0, "non_truncated": 1172, "padded": 4687, "non_padded": 0, "effective_few_shots": 25, "num_truncated_few_shots": 0 }, "harness|hellaswag|10": { "hashes": { "hash_examples": "e1768ecb99d7ecf0", "hash_full_prompts": "0b4c16983130f84f", "hash_input_tokens": "9e46720a9638c8a4", "hash_cont_tokens": "5856e609c5b49c4f" }, "truncated": 0, "non_truncated": 10042, "padded": 40068, "non_padded": 100, "effective_few_shots": 10, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-abstract_algebra|5": { "hashes": { "hash_examples": "280f9f325b40559a", "hash_full_prompts": "2f776a367d23aea2", "hash_input_tokens": "00dc12ab60f18dd3", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-anatomy|5": { "hashes": { "hash_examples": "2f83a4f1cab4ba18", "hash_full_prompts": "516f74bef25df620", "hash_input_tokens": "5b71f0137904b4fd", "hash_cont_tokens": "f9dae0f98ef7c0f2" }, "truncated": 0, "non_truncated": 135, "padded": 540, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-astronomy|5": { "hashes": { "hash_examples": "7d587b908da4d762", "hash_full_prompts": "faf4e80f65de93ca", "hash_input_tokens": "eef2a8a18c3925c0", "hash_cont_tokens": "dff84e206d2f1e0d" }, "truncated": 0, "non_truncated": 152, "padded": 608, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-business_ethics|5": { "hashes": { "hash_examples": "33e51740670de686", "hash_full_prompts": "db01c3ef8e1479d4", "hash_input_tokens": "c7c3799588097fc1", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-clinical_knowledge|5": { "hashes": { "hash_examples": "f3366dbe7eefffa4", "hash_full_prompts": "49654f71d94b65c3", "hash_input_tokens": "0da466ef69c2c211", "hash_cont_tokens": "b81dd170f83789d1" }, "truncated": 0, "non_truncated": 265, "padded": 1060, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_biology|5": { "hashes": { "hash_examples": "ca2b6753a0193e7f", "hash_full_prompts": "2b460b75f1fdfefd", "hash_input_tokens": "ea7865285fa63718", "hash_cont_tokens": "85c3400292af3bb8" }, "truncated": 0, "non_truncated": 144, "padded": 576, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_chemistry|5": { "hashes": { "hash_examples": "22ff85f1d34f42d1", "hash_full_prompts": "242c9be6da583e95", "hash_input_tokens": "551968a6bc1e1c69", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_computer_science|5": { "hashes": { "hash_examples": "30318289d717a5cf", "hash_full_prompts": "ed2bdb4e87c4b371", "hash_input_tokens": "12804011678b362d", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_mathematics|5": { "hashes": { "hash_examples": "4944d1f0b6b5d911", 
"hash_full_prompts": "770bc4281c973190", "hash_input_tokens": "4bd091031fc263d9", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_medicine|5": { "hashes": { "hash_examples": "dd69cc33381275af", "hash_full_prompts": "ad2a53e5250ab46e", "hash_input_tokens": "7532f5d07c6debfd", "hash_cont_tokens": "e5cb48f872b79ee7" }, "truncated": 0, "non_truncated": 173, "padded": 692, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_physics|5": { "hashes": { "hash_examples": "875dd26d22655b0d", "hash_full_prompts": "833a0d7b55aed500", "hash_input_tokens": "091beb94392a1731", "hash_cont_tokens": "40862171591ad909" }, "truncated": 0, "non_truncated": 102, "padded": 408, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-computer_security|5": { "hashes": { "hash_examples": "006451eedc0ededb", "hash_full_prompts": "94034c97e85d8f46", "hash_input_tokens": "d09375fff8e916d5", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-conceptual_physics|5": { "hashes": { "hash_examples": "8874ece872d2ca4c", "hash_full_prompts": "e40d15a34640d6fa", "hash_input_tokens": "ac72ede0b36aabf2", "hash_cont_tokens": "36bb2a47e8ff1bd8" }, "truncated": 0, "non_truncated": 235, "padded": 940, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-econometrics|5": { "hashes": { "hash_examples": "64d3623b0bfaa43f", "hash_full_prompts": "612f340fae41338d", "hash_input_tokens": "0a3072da09eaf315", "hash_cont_tokens": "433685e9aa542c2d" }, "truncated": 0, "non_truncated": 114, "padded": 456, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-electrical_engineering|5": { "hashes": { "hash_examples": "e98f51780c674d7e", "hash_full_prompts": "10275b312d812ae6", "hash_input_tokens": "45dcd2a8820fad20", "hash_cont_tokens": "f086b291b3aa0628" }, "truncated": 0, "non_truncated": 145, "padded": 576, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-elementary_mathematics|5": { "hashes": { "hash_examples": "fc48208a5ac1c0ce", "hash_full_prompts": "5ec274c6c82aca23", "hash_input_tokens": "73321a8a08f43d2f", "hash_cont_tokens": "4f402da407619e4d" }, "truncated": 0, "non_truncated": 378, "padded": 1512, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-formal_logic|5": { "hashes": { "hash_examples": "5a6525665f63ea72", "hash_full_prompts": "07b92638c4a6b500", "hash_input_tokens": "ef395842ce6008ce", "hash_cont_tokens": "80d8e3e54d900608" }, "truncated": 0, "non_truncated": 126, "padded": 504, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-global_facts|5": { "hashes": { "hash_examples": "371d70d743b2b89b", "hash_full_prompts": "332fdee50a1921b4", "hash_input_tokens": "f98b91cdb7b86749", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_biology|5": { "hashes": { "hash_examples": "a79e1018b1674052", "hash_full_prompts": "e624e26ede922561", "hash_input_tokens": 
"94f2ac3fa39ac4c7", "hash_cont_tokens": "e07819899bd63630" }, "truncated": 0, "non_truncated": 310, "padded": 1240, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_chemistry|5": { "hashes": { "hash_examples": "44bfc25c389f0e03", "hash_full_prompts": "0e3e5f5d9246482a", "hash_input_tokens": "1036da676d11ad62", "hash_cont_tokens": "eb6259a94d61e372" }, "truncated": 0, "non_truncated": 203, "padded": 812, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_computer_science|5": { "hashes": { "hash_examples": "8b8cdb1084f24169", "hash_full_prompts": "c00487e67c1813cc", "hash_input_tokens": "f40b47b509c459ae", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_european_history|5": { "hashes": { "hash_examples": "11cd32d0ef440171", "hash_full_prompts": "318f4513c537c6bf", "hash_input_tokens": "ca2a0a3cdee71062", "hash_cont_tokens": "c3336566c025bc59" }, "truncated": 0, "non_truncated": 165, "padded": 656, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_geography|5": { "hashes": { "hash_examples": "b60019b9e80b642f", "hash_full_prompts": "ee5789fcc1a81b1e", "hash_input_tokens": "d2a95c354bd5bce3", "hash_cont_tokens": "999a32d098465441" }, "truncated": 0, "non_truncated": 198, "padded": 792, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "hashes": { "hash_examples": "d221ec983d143dc3", "hash_full_prompts": "ac42d888e1ce1155", "hash_input_tokens": "477712b69094d77b", "hash_cont_tokens": "361410848e01f8ed" }, "truncated": 0, "non_truncated": 193, "padded": 772, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "hashes": { "hash_examples": "59c2915cacfd3fbb", "hash_full_prompts": "c6bd9d25158abd0e", "hash_input_tokens": "ea00f00108f471d1", "hash_cont_tokens": "18f9ae57b2444806" }, "truncated": 0, "non_truncated": 390, "padded": 1560, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_mathematics|5": { "hashes": { "hash_examples": "1f8ac897608de342", "hash_full_prompts": "5d88f41fc2d643a8", "hash_input_tokens": "6771092a57f1064b", "hash_cont_tokens": "a13496e646060699" }, "truncated": 0, "non_truncated": 270, "padded": 1080, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_microeconomics|5": { "hashes": { "hash_examples": "ead6a0f2f6c83370", "hash_full_prompts": "bfc393381298609e", "hash_input_tokens": "6434ce770cc3a07d", "hash_cont_tokens": "791a7a25f0571e59" }, "truncated": 0, "non_truncated": 238, "padded": 952, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_physics|5": { "hashes": { "hash_examples": "c3f2025990afec64", "hash_full_prompts": "fc78b4997e436734", "hash_input_tokens": "9b84202a0e20279e", "hash_cont_tokens": "9677b0687811cf73" }, "truncated": 0, "non_truncated": 151, "padded": 604, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_psychology|5": { "hashes": { "hash_examples": "21f8aab618f6d636", "hash_full_prompts": "d5c76aa40b9dbc43", 
"hash_input_tokens": "cbd1c4c25d9a95e1", "hash_cont_tokens": "6393201d9136920e" }, "truncated": 0, "non_truncated": 545, "padded": 2180, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_statistics|5": { "hashes": { "hash_examples": "2386a60a11fc5de3", "hash_full_prompts": "4c5c8be5aafac432", "hash_input_tokens": "a9d9974081f33401", "hash_cont_tokens": "17caccbb3a38c7bf" }, "truncated": 0, "non_truncated": 216, "padded": 864, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_us_history|5": { "hashes": { "hash_examples": "74961543be40f04f", "hash_full_prompts": "5d5ca4840131ba21", "hash_input_tokens": "4ea19e6b2da621ca", "hash_cont_tokens": "7128e2eeb930d3b3" }, "truncated": 0, "non_truncated": 204, "padded": 816, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_world_history|5": { "hashes": { "hash_examples": "2ad2f6b7198b2234", "hash_full_prompts": "11845057459afd72", "hash_input_tokens": "93e06ed8cb44fcb2", "hash_cont_tokens": "48e22ae63ee54721" }, "truncated": 0, "non_truncated": 237, "padded": 948, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-human_aging|5": { "hashes": { "hash_examples": "1a7199dc733e779b", "hash_full_prompts": "756b9096b8eaf892", "hash_input_tokens": "10b1be4021766536", "hash_cont_tokens": "0f40704815d5b3f6" }, "truncated": 0, "non_truncated": 223, "padded": 892, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-human_sexuality|5": { "hashes": { "hash_examples": "7acb8fdad97f88a6", "hash_full_prompts": "731a52ff15b8cfdb", "hash_input_tokens": "6e511aceb2a5cc1f", "hash_cont_tokens": "a9fdf5917bdddc9b" }, "truncated": 0, "non_truncated": 131, "padded": 524, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-international_law|5": { "hashes": { "hash_examples": "1300bfd0dfc59114", "hash_full_prompts": "db2aefbff5eec996", "hash_input_tokens": "f2250000a60c4675", "hash_cont_tokens": "c63e45a81fbe97b2" }, "truncated": 0, "non_truncated": 121, "padded": 484, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-jurisprudence|5": { "hashes": { "hash_examples": "083b1e4904c48dc2", "hash_full_prompts": "0f89ee3fe03d6a21", "hash_input_tokens": "1d37f2053687bf09", "hash_cont_tokens": "9df89edb95ea3c08" }, "truncated": 0, "non_truncated": 108, "padded": 428, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-logical_fallacies|5": { "hashes": { "hash_examples": "709128f9926a634c", "hash_full_prompts": "98a04b1f8f841069", "hash_input_tokens": "50a65f6db7781df6", "hash_cont_tokens": "5b4f21454680a984" }, "truncated": 0, "non_truncated": 163, "padded": 652, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-machine_learning|5": { "hashes": { "hash_examples": "88f22a636029ae47", "hash_full_prompts": "2e1c8d4b1e0cc921", "hash_input_tokens": "0214f9e954e7fcf7", "hash_cont_tokens": "0c2fc7f9e9101fbb" }, "truncated": 0, "non_truncated": 112, "padded": 448, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-management|5": { "hashes": { "hash_examples": "8c8a1e07a2151dca", "hash_full_prompts": "f51611f514b265b0", "hash_input_tokens": "3fc286ea903dc9e1", "hash_cont_tokens": 
"1279a23b3bc7b32c" }, "truncated": 0, "non_truncated": 103, "padded": 412, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-marketing|5": { "hashes": { "hash_examples": "2668953431f91e96", "hash_full_prompts": "77562bef997c7650", "hash_input_tokens": "dfa2c9b7866c93e6", "hash_cont_tokens": "be76778b3b861344" }, "truncated": 0, "non_truncated": 234, "padded": 936, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-medical_genetics|5": { "hashes": { "hash_examples": "9c2dda34a2ea4fd2", "hash_full_prompts": "202139046daa118f", "hash_input_tokens": "642d259108067cec", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-miscellaneous|5": { "hashes": { "hash_examples": "41adb694024809c2", "hash_full_prompts": "bffec9fc237bcf93", "hash_input_tokens": "1f55d640e75559b3", "hash_cont_tokens": "c61a0f86b50f0556" }, "truncated": 0, "non_truncated": 783, "padded": 3132, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-moral_disputes|5": { "hashes": { "hash_examples": "3171c13ba3c594c4", "hash_full_prompts": "170831fc36f1d59e", "hash_input_tokens": "11599cd92aca75c2", "hash_cont_tokens": "a208a34c74088f6c" }, "truncated": 0, "non_truncated": 346, "padded": 1384, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-moral_scenarios|5": { "hashes": { "hash_examples": "9873e077e83e0546", "hash_full_prompts": "08f4ceba3131a068", "hash_input_tokens": "9965b932ec67e2ff", "hash_cont_tokens": "996ce7a5b6c4aef1" }, "truncated": 0, "non_truncated": 895, "padded": 3580, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-nutrition|5": { "hashes": { "hash_examples": "7db1d8142ec14323", "hash_full_prompts": "4c0e68e3586cb453", "hash_input_tokens": "1da6449a92c60335", "hash_cont_tokens": "9d4280b06a73f2ad" }, "truncated": 0, "non_truncated": 306, "padded": 1224, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-philosophy|5": { "hashes": { "hash_examples": "9b455b7d72811cc8", "hash_full_prompts": "e467f822d8a0d3ff", "hash_input_tokens": "eacd0118cde3a6b6", "hash_cont_tokens": "9a708d21688a0b16" }, "truncated": 0, "non_truncated": 311, "padded": 1244, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-prehistory|5": { "hashes": { "hash_examples": "8be90d0f538f1560", "hash_full_prompts": "152187949bcd0921", "hash_input_tokens": "900a2e857049c7fb", "hash_cont_tokens": "ed0ff6b6c4caf978" }, "truncated": 0, "non_truncated": 324, "padded": 1296, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_accounting|5": { "hashes": { "hash_examples": "8d377597916cd07e", "hash_full_prompts": "0eb7345d6144ee0d", "hash_input_tokens": "2368119814fe27da", "hash_cont_tokens": "4fd1a023ef90b43a" }, "truncated": 0, "non_truncated": 282, "padded": 1120, "non_padded": 8, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_law|5": { "hashes": { "hash_examples": "cd9dbc52b3c932d6", "hash_full_prompts": "36ac764272bfb182", "hash_input_tokens": "ba0b150921d1354f", "hash_cont_tokens": "d2c1c75d7c0e6ec5" }, "truncated": 0, "non_truncated": 1534, "padded": 6136, "non_padded": 0, 
"effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_medicine|5": { "hashes": { "hash_examples": "b20e4e816c1e383e", "hash_full_prompts": "7b8d69ea2acaf2f7", "hash_input_tokens": "b80d6f9095fb702f", "hash_cont_tokens": "ff4c3ef8a56efe40" }, "truncated": 0, "non_truncated": 272, "padded": 1088, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_psychology|5": { "hashes": { "hash_examples": "d45b73b22f9cc039", "hash_full_prompts": "fe8937e9ffc99771", "hash_input_tokens": "10151b922fe9fdba", "hash_cont_tokens": "b4566ef91a66db7d" }, "truncated": 0, "non_truncated": 612, "padded": 2448, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-public_relations|5": { "hashes": { "hash_examples": "0d25072e1761652a", "hash_full_prompts": "f9adc39cfa9f42ba", "hash_input_tokens": "ed0c12fa575d30f6", "hash_cont_tokens": "b713ae56c89df822" }, "truncated": 0, "non_truncated": 110, "padded": 440, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-security_studies|5": { "hashes": { "hash_examples": "62bb8197e63d60d4", "hash_full_prompts": "869c9c3ae196b7c3", "hash_input_tokens": "49436381f9054ab9", "hash_cont_tokens": "89baef8c4b642ed0" }, "truncated": 0, "non_truncated": 245, "padded": 980, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-sociology|5": { "hashes": { "hash_examples": "e7959df87dea8672", "hash_full_prompts": "1a1fc00e17b3a52a", "hash_input_tokens": "9aff724e413681b7", "hash_cont_tokens": "b92ed9d8dde61395" }, "truncated": 0, "non_truncated": 201, "padded": 784, "non_padded": 20, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-us_foreign_policy|5": { "hashes": { "hash_examples": "4a56a01ddca44dca", "hash_full_prompts": "0c7a7081c71c07b6", "hash_input_tokens": "9103b692a946fc09", "hash_cont_tokens": "bc75e4dffef3dc0e" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-virology|5": { "hashes": { "hash_examples": "451cc86a8c4f4fe9", "hash_full_prompts": "01e95325d8b738e4", "hash_input_tokens": "6dc6ade73ee63cae", "hash_cont_tokens": "1c1bf88d7c979ef5" }, "truncated": 0, "non_truncated": 166, "padded": 664, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-world_religions|5": { "hashes": { "hash_examples": "3b29cfaf1a81c379", "hash_full_prompts": "e0d79a15083dfdff", "hash_input_tokens": "92ed8eba1ceb58b4", "hash_cont_tokens": "9fbfaba067301be2" }, "truncated": 0, "non_truncated": 171, "padded": 684, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|truthfulqa:mc|0": { "hashes": { "hash_examples": "23176c0531c7b867", "hash_full_prompts": "36a6d90e75d92d4a", "hash_input_tokens": "405dc01724068f4f", "hash_cont_tokens": "2aa05ab785b97e1d" }, "truncated": 0, "non_truncated": 817, "padded": 9996, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "harness|winogrande|5": { "hashes": { "hash_examples": "aada0a176fd81218", "hash_full_prompts": "c8655cbd12de8409", "hash_input_tokens": "e512c1d089d1c425", "hash_cont_tokens": "e5da1ddee7e80213" }, "truncated": 0, "non_truncated": 1267, "padded": 2534, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|gsm8k|5": { "hashes": { "hash_examples": 
"4c0843a5d99bcfdc", "hash_full_prompts": "41d55e83abc0e02d", "hash_input_tokens": "3b8275f3fce8067b", "hash_cont_tokens": "0d206a3f326288d8" }, "truncated": 0, "non_truncated": 1319, "padded": 0, "non_padded": 1319, "effective_few_shots": 5, "num_truncated_few_shots": 0 } }
{ "hashes": { "hash_examples": "3b7fa57a057f9415", "hash_full_prompts": "63615fc50fc9417c", "hash_input_tokens": "44ee6f861f86ab7d", "hash_cont_tokens": "63510439853388bf" }, "truncated": 0, "non_truncated": 28659, "padded": 113413, "non_padded": 1459, "num_truncated_few_shots": 0 }
{ "lighteval_sha": "494ee12240e716e804ae9ea834f84a2c864c07ca", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null, "job_id": "", "start_time": 1009823.331966069, "end_time": 1033923.596197144, "total_evaluation_time_secondes": "24100.264231075067", "model_name": "cloudyu/TomGrc_FusionNet_34Bx2_MoE_v0.1_DPO_f16", "model_sha": "cd29cfa124072c96ba8601230bead65d76e04dcb", "model_dtype": "torch.bfloat16", "model_size": "119.0 GB" }
{ "harness|arc:challenge|25": { "acc": 0.7218430034129693, "acc_stderr": 0.0130944699195388, "acc_norm": 0.7406143344709898, "acc_norm_stderr": 0.012808273573927094 }, "harness|hellaswag|10": { "acc": 0.6701852220673172, "acc_stderr": 0.004691848665399069, "acc_norm": 0.8673571001792472, "acc_norm_stderr": 0.003384951803213475 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.7407407407407407, "acc_stderr": 0.03785714465066653, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.03785714465066653 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.875, "acc_stderr": 0.026913523521537846, "acc_norm": 0.875, "acc_norm_stderr": 0.026913523521537846 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.8075471698113208, "acc_stderr": 0.024262979839372274, "acc_norm": 0.8075471698113208, "acc_norm_stderr": 0.024262979839372274 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8958333333333334, "acc_stderr": 0.025545239210256917, "acc_norm": 0.8958333333333334, "acc_norm_stderr": 0.025545239210256917 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7167630057803468, "acc_stderr": 0.034355680560478746, "acc_norm": 0.7167630057803468, "acc_norm_stderr": 0.034355680560478746 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.5196078431372549, "acc_stderr": 0.04971358884367406, "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.04971358884367406 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7617021276595745, "acc_stderr": 0.02785125297388977, "acc_norm": 0.7617021276595745, "acc_norm_stderr": 0.02785125297388977 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5964912280701754, "acc_stderr": 0.04615186962583707, "acc_norm": 0.5964912280701754, "acc_norm_stderr": 0.04615186962583707 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.7517241379310344, "acc_stderr": 0.036001056927277696, "acc_norm": 0.7517241379310344, "acc_norm_stderr": 0.036001056927277696 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.7433862433862434, "acc_stderr": 0.022494510767503154, "acc_norm": 0.7433862433862434, "acc_norm_stderr": 0.022494510767503154 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04444444444444449, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04444444444444449 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.9064516129032258, "acc_stderr": 0.016565754668270982, "acc_norm": 0.9064516129032258, 
"acc_norm_stderr": 0.016565754668270982 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.6650246305418719, "acc_stderr": 0.033208527423483104, "acc_norm": 0.6650246305418719, "acc_norm_stderr": 0.033208527423483104 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.042295258468165044, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165044 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8848484848484849, "acc_stderr": 0.024925699798115344, "acc_norm": 0.8848484848484849, "acc_norm_stderr": 0.024925699798115344 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9292929292929293, "acc_stderr": 0.01826310542019949, "acc_norm": 0.9292929292929293, "acc_norm_stderr": 0.01826310542019949 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9740932642487047, "acc_stderr": 0.011464523356953162, "acc_norm": 0.9740932642487047, "acc_norm_stderr": 0.011464523356953162 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.8102564102564103, "acc_stderr": 0.019880165406588796, "acc_norm": 0.8102564102564103, "acc_norm_stderr": 0.019880165406588796 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.030296771286067323, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.030296771286067323 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.8361344537815126, "acc_stderr": 0.024044054940440488, "acc_norm": 0.8361344537815126, "acc_norm_stderr": 0.024044054940440488 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.5231788079470199, "acc_stderr": 0.04078093859163085, "acc_norm": 0.5231788079470199, "acc_norm_stderr": 0.04078093859163085 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9229357798165138, "acc_stderr": 0.011434381698911096, "acc_norm": 0.9229357798165138, "acc_norm_stderr": 0.011434381698911096 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6712962962962963, "acc_stderr": 0.032036140846700596, "acc_norm": 0.6712962962962963, "acc_norm_stderr": 0.032036140846700596 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9264705882352942, "acc_stderr": 0.018318855850089678, "acc_norm": 0.9264705882352942, "acc_norm_stderr": 0.018318855850089678 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9113924050632911, "acc_stderr": 0.018498315206865384, "acc_norm": 0.9113924050632911, "acc_norm_stderr": 0.018498315206865384 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8026905829596412, "acc_stderr": 0.02670985334496796, "acc_norm": 0.8026905829596412, "acc_norm_stderr": 0.02670985334496796 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8702290076335878, "acc_stderr": 0.029473649496907065, "acc_norm": 0.8702290076335878, "acc_norm_stderr": 0.029473649496907065 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8842975206611571, "acc_stderr": 0.02919980245562281, "acc_norm": 0.8842975206611571, "acc_norm_stderr": 0.02919980245562281 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8981481481481481, "acc_stderr": 0.02923927267563275, "acc_norm": 0.8981481481481481, "acc_norm_stderr": 0.02923927267563275 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8773006134969326, "acc_stderr": 0.025777328426978927, "acc_norm": 0.8773006134969326, "acc_norm_stderr": 0.025777328426978927 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5446428571428571, "acc_stderr": 0.04726835553719098, "acc_norm": 
0.5446428571428571, "acc_norm_stderr": 0.04726835553719098 }, "harness|hendrycksTest-management|5": { "acc": 0.8640776699029126, "acc_stderr": 0.0339329572976101, "acc_norm": 0.8640776699029126, "acc_norm_stderr": 0.0339329572976101 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9444444444444444, "acc_stderr": 0.01500631280644693, "acc_norm": 0.9444444444444444, "acc_norm_stderr": 0.01500631280644693 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.913154533844189, "acc_stderr": 0.01007029837774778, "acc_norm": 0.913154533844189, "acc_norm_stderr": 0.01007029837774778 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.8294797687861272, "acc_stderr": 0.020247961569303728, "acc_norm": 0.8294797687861272, "acc_norm_stderr": 0.020247961569303728 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.8, "acc_stderr": 0.013378001241813072, "acc_norm": 0.8, "acc_norm_stderr": 0.013378001241813072 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.8562091503267973, "acc_stderr": 0.02009118893604371, "acc_norm": 0.8562091503267973, "acc_norm_stderr": 0.02009118893604371 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7942122186495176, "acc_stderr": 0.022961339906764248, "acc_norm": 0.7942122186495176, "acc_norm_stderr": 0.022961339906764248 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8672839506172839, "acc_stderr": 0.01887735383957184, "acc_norm": 0.8672839506172839, "acc_norm_stderr": 0.01887735383957184 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6418439716312057, "acc_stderr": 0.028602085862759422, "acc_norm": 0.6418439716312057, "acc_norm_stderr": 0.028602085862759422 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.590612777053455, "acc_stderr": 0.012558780895570757, "acc_norm": 0.590612777053455, "acc_norm_stderr": 0.012558780895570757 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.8382352941176471, "acc_stderr": 0.022368672562886747, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.022368672562886747 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.8137254901960784, "acc_stderr": 0.01575052628436335, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.01575052628436335 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7181818181818181, "acc_stderr": 0.043091187099464585, "acc_norm": 0.7181818181818181, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8530612244897959, "acc_stderr": 0.02266540041721764, "acc_norm": 0.8530612244897959, "acc_norm_stderr": 0.02266540041721764 }, "harness|hendrycksTest-sociology|5": { "acc": 0.9054726368159204, "acc_stderr": 0.020687186951534094, "acc_norm": 0.9054726368159204, "acc_norm_stderr": 0.020687186951534094 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015577, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015577 }, "harness|truthfulqa:mc|0": { "mc1": 0.5556915544675642, "mc1_stderr": 0.017394586250743176, "mc2": 0.7224126373641326, "mc2_stderr": 
0.014009811551091062 }, "harness|winogrande|5": { "acc": 0.8334648776637726, "acc_stderr": 0.010470796496781098 }, "harness|gsm8k|5": { "acc": 0.7445034116755117, "acc_stderr": 0.012013462405460067 }, "all": { "acc": 0.764901672440236, "acc_stderr": 0.02826230862515645, "acc_norm": 0.7677453718421197, "acc_norm_stderr": 0.02881226227160178, "mc1": 0.5556915544675642, "mc1_stderr": 0.017394586250743176, "mc2": 0.7224126373641326, "mc2_stderr": 0.014009811551091062 } }
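The closing "all" block in the results row above appears to aggregate the per-task metrics. A sketch that recomputes it as an unweighted mean of each metric over the tasks that report it, under the assumption that this is how the aggregate is formed here (function and argument names are illustrative):

```python
from collections import defaultdict

def recompute_all(results: dict) -> dict:
    """Unweighted mean of every metric (acc, acc_norm, mc1, mc2, and their
    stderrs) over the tasks that report it; intended to approximate the
    'all' block above, assuming that is how it is aggregated."""
    sums, counts = defaultdict(float), defaultdict(int)
    for task, metrics in results.items():
        if task == "all":
            continue
        for name, value in metrics.items():
            sums[name] += value
            counts[name] += 1
    return {name: sums[name] / counts[name] for name in sums}
```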
{ "all": 0, "harness|arc:challenge|25": 0, "harness|gsm8k|5": 0, "harness|hellaswag|10": 0, "harness|hendrycksTest-abstract_algebra|5": 1, "harness|hendrycksTest-anatomy|5": 1, "harness|hendrycksTest-astronomy|5": 1, "harness|hendrycksTest-business_ethics|5": 1, "harness|hendrycksTest-clinical_knowledge|5": 1, "harness|hendrycksTest-college_biology|5": 1, "harness|hendrycksTest-college_chemistry|5": 1, "harness|hendrycksTest-college_computer_science|5": 1, "harness|hendrycksTest-college_mathematics|5": 1, "harness|hendrycksTest-college_medicine|5": 1, "harness|hendrycksTest-college_physics|5": 1, "harness|hendrycksTest-computer_security|5": 1, "harness|hendrycksTest-conceptual_physics|5": 1, "harness|hendrycksTest-econometrics|5": 1, "harness|hendrycksTest-electrical_engineering|5": 1, "harness|hendrycksTest-elementary_mathematics|5": 1, "harness|hendrycksTest-formal_logic|5": 1, "harness|hendrycksTest-global_facts|5": 1, "harness|hendrycksTest-high_school_biology|5": 1, "harness|hendrycksTest-high_school_chemistry|5": 1, "harness|hendrycksTest-high_school_computer_science|5": 1, "harness|hendrycksTest-high_school_european_history|5": 1, "harness|hendrycksTest-high_school_geography|5": 1, "harness|hendrycksTest-high_school_government_and_politics|5": 1, "harness|hendrycksTest-high_school_macroeconomics|5": 1, "harness|hendrycksTest-high_school_mathematics|5": 1, "harness|hendrycksTest-high_school_microeconomics|5": 1, "harness|hendrycksTest-high_school_physics|5": 1, "harness|hendrycksTest-high_school_psychology|5": 1, "harness|hendrycksTest-high_school_statistics|5": 1, "harness|hendrycksTest-high_school_us_history|5": 1, "harness|hendrycksTest-high_school_world_history|5": 1, "harness|hendrycksTest-human_aging|5": 1, "harness|hendrycksTest-human_sexuality|5": 1, "harness|hendrycksTest-international_law|5": 1, "harness|hendrycksTest-jurisprudence|5": 1, "harness|hendrycksTest-logical_fallacies|5": 1, "harness|hendrycksTest-machine_learning|5": 1, "harness|hendrycksTest-management|5": 1, "harness|hendrycksTest-marketing|5": 1, "harness|hendrycksTest-medical_genetics|5": 1, "harness|hendrycksTest-miscellaneous|5": 1, "harness|hendrycksTest-moral_disputes|5": 1, "harness|hendrycksTest-moral_scenarios|5": 1, "harness|hendrycksTest-nutrition|5": 1, "harness|hendrycksTest-philosophy|5": 1, "harness|hendrycksTest-prehistory|5": 1, "harness|hendrycksTest-professional_accounting|5": 1, "harness|hendrycksTest-professional_law|5": 1, "harness|hendrycksTest-professional_medicine|5": 1, "harness|hendrycksTest-professional_psychology|5": 1, "harness|hendrycksTest-public_relations|5": 1, "harness|hendrycksTest-security_studies|5": 1, "harness|hendrycksTest-sociology|5": 1, "harness|hendrycksTest-us_foreign_policy|5": 1, "harness|hendrycksTest-virology|5": 1, "harness|hendrycksTest-world_religions|5": 1, "harness|truthfulqa:mc|0": 1, "harness|winogrande|5": 0 }
{ "harness|arc:challenge": "LM Harness task", "harness|gsm8k": "LM Harness task", "harness|hellaswag": "LM Harness task", "harness|hendrycksTest-abstract_algebra": "LM Harness task", "harness|hendrycksTest-anatomy": "LM Harness task", "harness|hendrycksTest-astronomy": "LM Harness task", "harness|hendrycksTest-business_ethics": "LM Harness task", "harness|hendrycksTest-clinical_knowledge": "LM Harness task", "harness|hendrycksTest-college_biology": "LM Harness task", "harness|hendrycksTest-college_chemistry": "LM Harness task", "harness|hendrycksTest-college_computer_science": "LM Harness task", "harness|hendrycksTest-college_mathematics": "LM Harness task", "harness|hendrycksTest-college_medicine": "LM Harness task", "harness|hendrycksTest-college_physics": "LM Harness task", "harness|hendrycksTest-computer_security": "LM Harness task", "harness|hendrycksTest-conceptual_physics": "LM Harness task", "harness|hendrycksTest-econometrics": "LM Harness task", "harness|hendrycksTest-electrical_engineering": "LM Harness task", "harness|hendrycksTest-elementary_mathematics": "LM Harness task", "harness|hendrycksTest-formal_logic": "LM Harness task", "harness|hendrycksTest-global_facts": "LM Harness task", "harness|hendrycksTest-high_school_biology": "LM Harness task", "harness|hendrycksTest-high_school_chemistry": "LM Harness task", "harness|hendrycksTest-high_school_computer_science": "LM Harness task", "harness|hendrycksTest-high_school_european_history": "LM Harness task", "harness|hendrycksTest-high_school_geography": "LM Harness task", "harness|hendrycksTest-high_school_government_and_politics": "LM Harness task", "harness|hendrycksTest-high_school_macroeconomics": "LM Harness task", "harness|hendrycksTest-high_school_mathematics": "LM Harness task", "harness|hendrycksTest-high_school_microeconomics": "LM Harness task", "harness|hendrycksTest-high_school_physics": "LM Harness task", "harness|hendrycksTest-high_school_psychology": "LM Harness task", "harness|hendrycksTest-high_school_statistics": "LM Harness task", "harness|hendrycksTest-high_school_us_history": "LM Harness task", "harness|hendrycksTest-high_school_world_history": "LM Harness task", "harness|hendrycksTest-human_aging": "LM Harness task", "harness|hendrycksTest-human_sexuality": "LM Harness task", "harness|hendrycksTest-international_law": "LM Harness task", "harness|hendrycksTest-jurisprudence": "LM Harness task", "harness|hendrycksTest-logical_fallacies": "LM Harness task", "harness|hendrycksTest-machine_learning": "LM Harness task", "harness|hendrycksTest-management": "LM Harness task", "harness|hendrycksTest-marketing": "LM Harness task", "harness|hendrycksTest-medical_genetics": "LM Harness task", "harness|hendrycksTest-miscellaneous": "LM Harness task", "harness|hendrycksTest-moral_disputes": "LM Harness task", "harness|hendrycksTest-moral_scenarios": "LM Harness task", "harness|hendrycksTest-nutrition": "LM Harness task", "harness|hendrycksTest-philosophy": "LM Harness task", "harness|hendrycksTest-prehistory": "LM Harness task", "harness|hendrycksTest-professional_accounting": "LM Harness task", "harness|hendrycksTest-professional_law": "LM Harness task", "harness|hendrycksTest-professional_medicine": "LM Harness task", "harness|hendrycksTest-professional_psychology": "LM Harness task", "harness|hendrycksTest-public_relations": "LM Harness task", "harness|hendrycksTest-security_studies": "LM Harness task", "harness|hendrycksTest-sociology": "LM Harness task", "harness|hendrycksTest-us_foreign_policy": "LM Harness 
task", "harness|hendrycksTest-virology": "LM Harness task", "harness|hendrycksTest-world_religions": "LM Harness task", "harness|truthfulqa:mc": "LM Harness task", "harness|winogrande": "LM Harness task" }
{ "harness|arc:challenge|25": { "hashes": { "hash_examples": "17b0cae357c0259e", "hash_full_prompts": "045cbb916e5145c6", "hash_input_tokens": "f52f7134dd4e8235", "hash_cont_tokens": "e23c779c4c2dd1ec" }, "truncated": 0, "non_truncated": 1172, "padded": 4682, "non_padded": 5, "effective_few_shots": 25, "num_truncated_few_shots": 0 }, "harness|hellaswag|10": { "hashes": { "hash_examples": "e1768ecb99d7ecf0", "hash_full_prompts": "0b4c16983130f84f", "hash_input_tokens": "8380af90422a117e", "hash_cont_tokens": "55da5ba61989a8fe" }, "truncated": 0, "non_truncated": 10042, "padded": 40097, "non_padded": 71, "effective_few_shots": 10, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-abstract_algebra|5": { "hashes": { "hash_examples": "280f9f325b40559a", "hash_full_prompts": "2f776a367d23aea2", "hash_input_tokens": "9185dc38dcc328ea", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-anatomy|5": { "hashes": { "hash_examples": "2f83a4f1cab4ba18", "hash_full_prompts": "516f74bef25df620", "hash_input_tokens": "90fdbbaaf0213cec", "hash_cont_tokens": "5cc800feae9fa1ad" }, "truncated": 0, "non_truncated": 135, "padded": 540, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-astronomy|5": { "hashes": { "hash_examples": "7d587b908da4d762", "hash_full_prompts": "faf4e80f65de93ca", "hash_input_tokens": "cbe1c711494076b6", "hash_cont_tokens": "655dbb90034f484a" }, "truncated": 0, "non_truncated": 152, "padded": 608, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-business_ethics|5": { "hashes": { "hash_examples": "33e51740670de686", "hash_full_prompts": "db01c3ef8e1479d4", "hash_input_tokens": "09397035a4a73e5f", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-clinical_knowledge|5": { "hashes": { "hash_examples": "f3366dbe7eefffa4", "hash_full_prompts": "49654f71d94b65c3", "hash_input_tokens": "90c311de52544438", "hash_cont_tokens": "f77b74d946d7fc02" }, "truncated": 0, "non_truncated": 265, "padded": 1060, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_biology|5": { "hashes": { "hash_examples": "ca2b6753a0193e7f", "hash_full_prompts": "2b460b75f1fdfefd", "hash_input_tokens": "d8fd4e3af4ae46c3", "hash_cont_tokens": "1ba4b1a158d8bf3f" }, "truncated": 0, "non_truncated": 144, "padded": 576, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_chemistry|5": { "hashes": { "hash_examples": "22ff85f1d34f42d1", "hash_full_prompts": "242c9be6da583e95", "hash_input_tokens": "da514a10083e8e97", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_computer_science|5": { "hashes": { "hash_examples": "30318289d717a5cf", "hash_full_prompts": "ed2bdb4e87c4b371", "hash_input_tokens": "7ccea65975bb46d4", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_mathematics|5": { "hashes": { "hash_examples": "4944d1f0b6b5d911", 
"hash_full_prompts": "770bc4281c973190", "hash_input_tokens": "8ea8585f6adc2650", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_medicine|5": { "hashes": { "hash_examples": "dd69cc33381275af", "hash_full_prompts": "ad2a53e5250ab46e", "hash_input_tokens": "9d07c6e852253252", "hash_cont_tokens": "78a0ebf66d91c5cf" }, "truncated": 0, "non_truncated": 173, "padded": 692, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-college_physics|5": { "hashes": { "hash_examples": "875dd26d22655b0d", "hash_full_prompts": "833a0d7b55aed500", "hash_input_tokens": "0d3d540477f9eddb", "hash_cont_tokens": "5a030c95824fdbe5" }, "truncated": 0, "non_truncated": 102, "padded": 408, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-computer_security|5": { "hashes": { "hash_examples": "006451eedc0ededb", "hash_full_prompts": "94034c97e85d8f46", "hash_input_tokens": "5ebc754afaa1fac8", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-conceptual_physics|5": { "hashes": { "hash_examples": "8874ece872d2ca4c", "hash_full_prompts": "e40d15a34640d6fa", "hash_input_tokens": "7780b9cde8badacb", "hash_cont_tokens": "2326dc60d0bc41b6" }, "truncated": 0, "non_truncated": 235, "padded": 940, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-econometrics|5": { "hashes": { "hash_examples": "64d3623b0bfaa43f", "hash_full_prompts": "612f340fae41338d", "hash_input_tokens": "8acec1576892f7ab", "hash_cont_tokens": "be908364b6f14dd6" }, "truncated": 0, "non_truncated": 114, "padded": 456, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-electrical_engineering|5": { "hashes": { "hash_examples": "e98f51780c674d7e", "hash_full_prompts": "10275b312d812ae6", "hash_input_tokens": "e0321889f63f18d7", "hash_cont_tokens": "179280ef597fe1bf" }, "truncated": 0, "non_truncated": 145, "padded": 564, "non_padded": 16, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-elementary_mathematics|5": { "hashes": { "hash_examples": "fc48208a5ac1c0ce", "hash_full_prompts": "5ec274c6c82aca23", "hash_input_tokens": "60e497887b9e2608", "hash_cont_tokens": "95cdcdaf1abd0bd2" }, "truncated": 0, "non_truncated": 378, "padded": 1512, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-formal_logic|5": { "hashes": { "hash_examples": "5a6525665f63ea72", "hash_full_prompts": "07b92638c4a6b500", "hash_input_tokens": "53adc0607e358206", "hash_cont_tokens": "6a4818f3c307c346" }, "truncated": 0, "non_truncated": 126, "padded": 504, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-global_facts|5": { "hashes": { "hash_examples": "371d70d743b2b89b", "hash_full_prompts": "332fdee50a1921b4", "hash_input_tokens": "34682f752c1a1ac4", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_biology|5": { "hashes": { "hash_examples": "a79e1018b1674052", "hash_full_prompts": "e624e26ede922561", "hash_input_tokens": 
"bb5cc287970e5c14", "hash_cont_tokens": "36d0d84455f0bdba" }, "truncated": 0, "non_truncated": 310, "padded": 1240, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_chemistry|5": { "hashes": { "hash_examples": "44bfc25c389f0e03", "hash_full_prompts": "0e3e5f5d9246482a", "hash_input_tokens": "b12197fdbc9a45f0", "hash_cont_tokens": "c678f794a9b8ee74" }, "truncated": 0, "non_truncated": 203, "padded": 812, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_computer_science|5": { "hashes": { "hash_examples": "8b8cdb1084f24169", "hash_full_prompts": "c00487e67c1813cc", "hash_input_tokens": "36408b638d9d7a8d", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_european_history|5": { "hashes": { "hash_examples": "11cd32d0ef440171", "hash_full_prompts": "318f4513c537c6bf", "hash_input_tokens": "652bd20e505a2826", "hash_cont_tokens": "e9c94304326d875c" }, "truncated": 0, "non_truncated": 165, "padded": 656, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_geography|5": { "hashes": { "hash_examples": "b60019b9e80b642f", "hash_full_prompts": "ee5789fcc1a81b1e", "hash_input_tokens": "8f4cd01faf05c6f1", "hash_cont_tokens": "f937a1349eb483eb" }, "truncated": 0, "non_truncated": 198, "padded": 792, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "hashes": { "hash_examples": "d221ec983d143dc3", "hash_full_prompts": "ac42d888e1ce1155", "hash_input_tokens": "217861435fcb5576", "hash_cont_tokens": "8b27dd3907d25b4e" }, "truncated": 0, "non_truncated": 193, "padded": 772, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "hashes": { "hash_examples": "59c2915cacfd3fbb", "hash_full_prompts": "c6bd9d25158abd0e", "hash_input_tokens": "bcedb3cf953f812f", "hash_cont_tokens": "3763cae29e2f938c" }, "truncated": 0, "non_truncated": 390, "padded": 1560, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_mathematics|5": { "hashes": { "hash_examples": "1f8ac897608de342", "hash_full_prompts": "5d88f41fc2d643a8", "hash_input_tokens": "52affce916d66c97", "hash_cont_tokens": "fd7b555352d765a4" }, "truncated": 0, "non_truncated": 270, "padded": 1080, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_microeconomics|5": { "hashes": { "hash_examples": "ead6a0f2f6c83370", "hash_full_prompts": "bfc393381298609e", "hash_input_tokens": "b9d29201856d353d", "hash_cont_tokens": "61f46d4a209b9aa2" }, "truncated": 0, "non_truncated": 238, "padded": 952, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_physics|5": { "hashes": { "hash_examples": "c3f2025990afec64", "hash_full_prompts": "fc78b4997e436734", "hash_input_tokens": "9c27af329cb41097", "hash_cont_tokens": "4e7053e7c19d680d" }, "truncated": 0, "non_truncated": 151, "padded": 604, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_psychology|5": { "hashes": { "hash_examples": "21f8aab618f6d636", "hash_full_prompts": "d5c76aa40b9dbc43", 
"hash_input_tokens": "192aef17a8956826", "hash_cont_tokens": "84d19ae8790476bb" }, "truncated": 0, "non_truncated": 545, "padded": 2180, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_statistics|5": { "hashes": { "hash_examples": "2386a60a11fc5de3", "hash_full_prompts": "4c5c8be5aafac432", "hash_input_tokens": "a9bc6c02c6f83983", "hash_cont_tokens": "b119c7b668213a4e" }, "truncated": 0, "non_truncated": 216, "padded": 864, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_us_history|5": { "hashes": { "hash_examples": "74961543be40f04f", "hash_full_prompts": "5d5ca4840131ba21", "hash_input_tokens": "14741fa2bd2a4414", "hash_cont_tokens": "a3b126bc622d571f" }, "truncated": 0, "non_truncated": 204, "padded": 816, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-high_school_world_history|5": { "hashes": { "hash_examples": "2ad2f6b7198b2234", "hash_full_prompts": "11845057459afd72", "hash_input_tokens": "67f306eb2bf3d2cb", "hash_cont_tokens": "9abf19ceb76331ff" }, "truncated": 0, "non_truncated": 237, "padded": 948, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-human_aging|5": { "hashes": { "hash_examples": "1a7199dc733e779b", "hash_full_prompts": "756b9096b8eaf892", "hash_input_tokens": "e5cc30c46358588f", "hash_cont_tokens": "0e2e725ae9a898da" }, "truncated": 0, "non_truncated": 223, "padded": 892, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-human_sexuality|5": { "hashes": { "hash_examples": "7acb8fdad97f88a6", "hash_full_prompts": "731a52ff15b8cfdb", "hash_input_tokens": "10a6536adeac8632", "hash_cont_tokens": "a94c1dea6d775249" }, "truncated": 0, "non_truncated": 131, "padded": 524, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-international_law|5": { "hashes": { "hash_examples": "1300bfd0dfc59114", "hash_full_prompts": "db2aefbff5eec996", "hash_input_tokens": "d9015aba41ce0d5c", "hash_cont_tokens": "3832f860859bb86b" }, "truncated": 0, "non_truncated": 121, "padded": 484, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-jurisprudence|5": { "hashes": { "hash_examples": "083b1e4904c48dc2", "hash_full_prompts": "0f89ee3fe03d6a21", "hash_input_tokens": "d5f2109de63c3402", "hash_cont_tokens": "9fac5a0c364fca8a" }, "truncated": 0, "non_truncated": 108, "padded": 432, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-logical_fallacies|5": { "hashes": { "hash_examples": "709128f9926a634c", "hash_full_prompts": "98a04b1f8f841069", "hash_input_tokens": "e0b39eb7c9788cfe", "hash_cont_tokens": "dc53ed31134ddf3a" }, "truncated": 0, "non_truncated": 163, "padded": 644, "non_padded": 8, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-machine_learning|5": { "hashes": { "hash_examples": "88f22a636029ae47", "hash_full_prompts": "2e1c8d4b1e0cc921", "hash_input_tokens": "643a872ad0f99bb0", "hash_cont_tokens": "e272b5456d5552d6" }, "truncated": 0, "non_truncated": 112, "padded": 448, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-management|5": { "hashes": { "hash_examples": "8c8a1e07a2151dca", "hash_full_prompts": "f51611f514b265b0", "hash_input_tokens": "1232c5b0f524b151", "hash_cont_tokens": 
"7119d4642957b1f0" }, "truncated": 0, "non_truncated": 103, "padded": 412, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-marketing|5": { "hashes": { "hash_examples": "2668953431f91e96", "hash_full_prompts": "77562bef997c7650", "hash_input_tokens": "f1d76d4a1e08e901", "hash_cont_tokens": "099d58c66ece3f11" }, "truncated": 0, "non_truncated": 234, "padded": 936, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-medical_genetics|5": { "hashes": { "hash_examples": "9c2dda34a2ea4fd2", "hash_full_prompts": "202139046daa118f", "hash_input_tokens": "cd181ff20fe83b83", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-miscellaneous|5": { "hashes": { "hash_examples": "41adb694024809c2", "hash_full_prompts": "bffec9fc237bcf93", "hash_input_tokens": "a3d90d10e2efc569", "hash_cont_tokens": "bae342d4e82ba8f7" }, "truncated": 0, "non_truncated": 783, "padded": 3132, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-moral_disputes|5": { "hashes": { "hash_examples": "3171c13ba3c594c4", "hash_full_prompts": "170831fc36f1d59e", "hash_input_tokens": "4b35576715cc147a", "hash_cont_tokens": "578c64cbdbb1e0d4" }, "truncated": 0, "non_truncated": 346, "padded": 1384, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-moral_scenarios|5": { "hashes": { "hash_examples": "9873e077e83e0546", "hash_full_prompts": "08f4ceba3131a068", "hash_input_tokens": "1b93703ae85294ee", "hash_cont_tokens": "79b25f42b3fce0f9" }, "truncated": 0, "non_truncated": 895, "padded": 3580, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-nutrition|5": { "hashes": { "hash_examples": "7db1d8142ec14323", "hash_full_prompts": "4c0e68e3586cb453", "hash_input_tokens": "6741a26253bd4258", "hash_cont_tokens": "9d1f3b976417156c" }, "truncated": 0, "non_truncated": 306, "padded": 1224, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-philosophy|5": { "hashes": { "hash_examples": "9b455b7d72811cc8", "hash_full_prompts": "e467f822d8a0d3ff", "hash_input_tokens": "730a52e273f8fcf5", "hash_cont_tokens": "88dab560e1e06d97" }, "truncated": 0, "non_truncated": 311, "padded": 1244, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-prehistory|5": { "hashes": { "hash_examples": "8be90d0f538f1560", "hash_full_prompts": "152187949bcd0921", "hash_input_tokens": "9e211e939e14b414", "hash_cont_tokens": "04ea847139fe9393" }, "truncated": 0, "non_truncated": 324, "padded": 1296, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_accounting|5": { "hashes": { "hash_examples": "8d377597916cd07e", "hash_full_prompts": "0eb7345d6144ee0d", "hash_input_tokens": "d5761e6be99ed835", "hash_cont_tokens": "0435ff692ad17e68" }, "truncated": 0, "non_truncated": 282, "padded": 1124, "non_padded": 4, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_law|5": { "hashes": { "hash_examples": "cd9dbc52b3c932d6", "hash_full_prompts": "36ac764272bfb182", "hash_input_tokens": "fcbc59834dbaa06c", "hash_cont_tokens": "b852c74e9f8801bd" }, "truncated": 0, "non_truncated": 1534, "padded": 6136, "non_padded": 0, 
"effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_medicine|5": { "hashes": { "hash_examples": "b20e4e816c1e383e", "hash_full_prompts": "7b8d69ea2acaf2f7", "hash_input_tokens": "ba5999ee85a41b08", "hash_cont_tokens": "5db0f6460652d063" }, "truncated": 0, "non_truncated": 272, "padded": 1088, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-professional_psychology|5": { "hashes": { "hash_examples": "d45b73b22f9cc039", "hash_full_prompts": "fe8937e9ffc99771", "hash_input_tokens": "35652463c3b2d9c6", "hash_cont_tokens": "c960676ef7f3dbe5" }, "truncated": 0, "non_truncated": 612, "padded": 2448, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-public_relations|5": { "hashes": { "hash_examples": "0d25072e1761652a", "hash_full_prompts": "f9adc39cfa9f42ba", "hash_input_tokens": "af501bc2c58d000f", "hash_cont_tokens": "3320565f412c4b01" }, "truncated": 0, "non_truncated": 110, "padded": 440, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-security_studies|5": { "hashes": { "hash_examples": "62bb8197e63d60d4", "hash_full_prompts": "869c9c3ae196b7c3", "hash_input_tokens": "5df7af45226ffc3a", "hash_cont_tokens": "218ed775ef60aab9" }, "truncated": 0, "non_truncated": 245, "padded": 980, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-sociology|5": { "hashes": { "hash_examples": "e7959df87dea8672", "hash_full_prompts": "1a1fc00e17b3a52a", "hash_input_tokens": "5dc2e3734f4dd402", "hash_cont_tokens": "20babf5cc4cc7f3d" }, "truncated": 0, "non_truncated": 201, "padded": 804, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-us_foreign_policy|5": { "hashes": { "hash_examples": "4a56a01ddca44dca", "hash_full_prompts": "0c7a7081c71c07b6", "hash_input_tokens": "ed972b660c40d1e4", "hash_cont_tokens": "bcc22fd85dcc85e9" }, "truncated": 0, "non_truncated": 100, "padded": 400, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-virology|5": { "hashes": { "hash_examples": "451cc86a8c4f4fe9", "hash_full_prompts": "01e95325d8b738e4", "hash_input_tokens": "ed703c55cc114c98", "hash_cont_tokens": "dc6d57296bea0882" }, "truncated": 0, "non_truncated": 166, "padded": 664, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|hendrycksTest-world_religions|5": { "hashes": { "hash_examples": "3b29cfaf1a81c379", "hash_full_prompts": "e0d79a15083dfdff", "hash_input_tokens": "00cf9f5943b1480b", "hash_cont_tokens": "37f53444db289ed3" }, "truncated": 0, "non_truncated": 171, "padded": 684, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|truthfulqa:mc|0": { "hashes": { "hash_examples": "23176c0531c7b867", "hash_full_prompts": "36a6d90e75d92d4a", "hash_input_tokens": "5e931dfc6ab75011", "hash_cont_tokens": "71a67034827cd30e" }, "truncated": 0, "non_truncated": 817, "padded": 9996, "non_padded": 0, "effective_few_shots": 0, "num_truncated_few_shots": 0 }, "harness|winogrande|5": { "hashes": { "hash_examples": "aada0a176fd81218", "hash_full_prompts": "c8655cbd12de8409", "hash_input_tokens": "bd055e8ba456ab4a", "hash_cont_tokens": "c93e9c22fa3077a0" }, "truncated": 0, "non_truncated": 1267, "padded": 2534, "non_padded": 0, "effective_few_shots": 5, "num_truncated_few_shots": 0 }, "harness|gsm8k|5": { "hashes": { "hash_examples": 
"4c0843a5d99bcfdc", "hash_full_prompts": "41d55e83abc0e02d", "hash_input_tokens": "5cae6c4034435931", "hash_cont_tokens": "261f54d6603ee2bd" }, "truncated": 0, "non_truncated": 1319, "padded": 0, "non_padded": 1319, "effective_few_shots": 5, "num_truncated_few_shots": 0 } }
{ "hashes": { "hash_examples": "3b7fa57a057f9415", "hash_full_prompts": "63615fc50fc9417c", "hash_input_tokens": "2f7ca631fba4ce39", "hash_cont_tokens": "e621b4a7c3fa87a7" }, "truncated": 0, "non_truncated": 28659, "padded": 113445, "non_padded": 1427, "num_truncated_few_shots": 0 }
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED)
{"harness|arc:challenge|25":{"acc":0.7167235494880546,"acc_stderr":0.013167478735134575,"acc_norm":0(...TRUNCATED)
{"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED)
{"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED)
{"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED)
{"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED)
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED)
{"harness|arc:challenge|25":{"acc":0.7192832764505119,"acc_stderr":0.013131238126975583,"acc_norm":0(...TRUNCATED)
{"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED)
{"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED)
{"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED)
{"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED)
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED)
{"harness|arc:challenge|25":{"acc":0.7081911262798635,"acc_stderr":0.013284525292403503,"acc_norm":0(...TRUNCATED)
{"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED)
{"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED)
{"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED)
{"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED)
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED)
{"harness|arc:challenge|25":{"acc":0.7704778156996587,"acc_stderr":0.012288926760890797,"acc_norm":0(...TRUNCATED)
{"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED)
{"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED)
{"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED)
{"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED)
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED)
{"harness|arc:challenge|25":{"acc":0.7704778156996587,"acc_stderr":0.012288926760890797,"acc_norm":0(...TRUNCATED)
{"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED)
{"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED)
{"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED)
{"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED)
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED)
{"harness|arc:challenge|25":{"acc":0.7627986348122867,"acc_stderr":0.012430399829260851,"acc_norm":0(...TRUNCATED)
{"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED)
{"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED)
{"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED)
{"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED)
{"lighteval_sha":"494ee12240e716e804ae9ea834f84a2c864c07ca","num_few_shot_default":0,"num_fewshot_se(...TRUNCATED)
{"harness|arc:challenge|25":{"acc":0.7627986348122867,"acc_stderr":0.012430399829260851,"acc_norm":0(...TRUNCATED)
{"all":0,"harness|arc:challenge|25":0,"harness|gsm8k|5":0,"harness|hellaswag|10":0,"harness|hendryck(...TRUNCATED)
{"harness|arc:challenge":"LM Harness task","harness|gsm8k":"LM Harness task","harness|hellaswag":"LM(...TRUNCATED)
{"harness|arc:challenge|25":{"hashes":{"hash_examples":"17b0cae357c0259e","hash_full_prompts":"045cb(...TRUNCATED)
{"hashes":{"hash_examples":"3b7fa57a057f9415","hash_full_prompts":"63615fc50fc9417c","hash_input_tok(...TRUNCATED)