{ "config_general": { "lighteval_sha": "1.4", "num_few_shot_default": null, "num_fewshot_seeds": null, "override_batch_size": null, "max_samples": null, "job_id": -1, "start_time": null, "end_time": "2024-05-13-15-29-15", "total_evaluation_time_secondes": "", "model_name": "leliuga/Llama-2-13b-chat-hf-bnb-4bit", "model_sha": "", "model_dtype": "4bit", "model_size": 7.2, "model_params": 13.08, "quant_type": "bitsandbytes", "precision": "4bit" }, "results": { "harness|winogrande|0": { "acc,none": 0.6961325966850829, "acc_stderr,none": 0.012926209475483577, "alias": "winogrande" }, "harness|piqa|0": { "acc,none": 0.7763873775843307, "acc_stderr,none": 0.009721489519176294, "acc_norm,none": 0.7878128400435256, "acc_norm_stderr,none": 0.009539299828174096, "alias": "piqa" }, "harness|lambada:openai|0": { "perplexity,none": 2.9447294626493585, "perplexity_stderr,none": 0.0704565389146536, "acc,none": 0.7321948379584708, "acc_stderr,none": 0.006169285604118633, "alias": "lambada_openai" }, "harness|boolq|0": { "acc,none": 0.8024464831804281, "acc_stderr,none": 0.006963746631628732, "alias": "boolq" }, "harness|mmlu|0": { "acc,none": 0.5222190571143712, "acc_stderr,none": 0.00399406528780583, "alias": "mmlu" }, "harness|mmlu_humanities|0": { "alias": " - humanities", "acc,none": 0.48310308182784273, "acc_stderr,none": 0.0068136684594462425 }, "harness|mmlu_formal_logic|0": { "alias": " - formal_logic", "acc,none": 0.2698412698412698, "acc_stderr,none": 0.03970158273235173 }, "harness|mmlu_high_school_european_history|0": { "alias": " - high_school_european_history", "acc,none": 0.6666666666666666, "acc_stderr,none": 0.0368105086916155 }, "harness|mmlu_high_school_us_history|0": { "alias": " - high_school_us_history", "acc,none": 0.7450980392156863, "acc_stderr,none": 0.030587591351604246 }, "harness|mmlu_high_school_world_history|0": { "alias": " - high_school_world_history", "acc,none": 0.7215189873417721, "acc_stderr,none": 0.02917868230484255 }, "harness|mmlu_international_law|0": { "alias": " - international_law", "acc,none": 0.7355371900826446, "acc_stderr,none": 0.04026187527591206 }, "harness|mmlu_jurisprudence|0": { "alias": " - jurisprudence", "acc,none": 0.6666666666666666, "acc_stderr,none": 0.04557239513497751 }, "harness|mmlu_logical_fallacies|0": { "alias": " - logical_fallacies", "acc,none": 0.6932515337423313, "acc_stderr,none": 0.03623089915724148 }, "harness|mmlu_moral_disputes|0": { "alias": " - moral_disputes", "acc,none": 0.5838150289017341, "acc_stderr,none": 0.026538189104705484 }, "harness|mmlu_moral_scenarios|0": { "alias": " - moral_scenarios", "acc,none": 0.24581005586592178, "acc_stderr,none": 0.014400296429225627 }, "harness|mmlu_philosophy|0": { "alias": " - philosophy", "acc,none": 0.6237942122186495, "acc_stderr,none": 0.027513925683549427 }, "harness|mmlu_prehistory|0": { "alias": " - prehistory", "acc,none": 0.5925925925925926, "acc_stderr,none": 0.027339546640662734 }, "harness|mmlu_professional_law|0": { "alias": " - professional_law", "acc,none": 0.3859191655801825, "acc_stderr,none": 0.012433398911476138 }, "harness|mmlu_world_religions|0": { "alias": " - world_religions", "acc,none": 0.7719298245614035, "acc_stderr,none": 0.032180937956023566 }, "harness|mmlu_other|0": { "alias": " - other", "acc,none": 0.5983263598326359, "acc_stderr,none": 0.00848786302301734 }, "harness|mmlu_business_ethics|0": { "alias": " - business_ethics", "acc,none": 0.51, "acc_stderr,none": 0.05024183937956912 }, "harness|mmlu_clinical_knowledge|0": { "alias": " - 
clinical_knowledge", "acc,none": 0.569811320754717, "acc_stderr,none": 0.030471445867183238 }, "harness|mmlu_college_medicine|0": { "alias": " - college_medicine", "acc,none": 0.4682080924855491, "acc_stderr,none": 0.03804749744364764 }, "harness|mmlu_global_facts|0": { "alias": " - global_facts", "acc,none": 0.33, "acc_stderr,none": 0.047258156262526045 }, "harness|mmlu_human_aging|0": { "alias": " - human_aging", "acc,none": 0.6143497757847534, "acc_stderr,none": 0.03266842214289201 }, "harness|mmlu_management|0": { "alias": " - management", "acc,none": 0.6893203883495146, "acc_stderr,none": 0.04582124160161551 }, "harness|mmlu_marketing|0": { "alias": " - marketing", "acc,none": 0.7905982905982906, "acc_stderr,none": 0.026655699653922737 }, "harness|mmlu_medical_genetics|0": { "alias": " - medical_genetics", "acc,none": 0.59, "acc_stderr,none": 0.049431107042371025 }, "harness|mmlu_miscellaneous|0": { "alias": " - miscellaneous", "acc,none": 0.7432950191570882, "acc_stderr,none": 0.015620480263064533 }, "harness|mmlu_nutrition|0": { "alias": " - nutrition", "acc,none": 0.6078431372549019, "acc_stderr,none": 0.027956046165424513 }, "harness|mmlu_professional_accounting|0": { "alias": " - professional_accounting", "acc,none": 0.41134751773049644, "acc_stderr,none": 0.02935491115994098 }, "harness|mmlu_professional_medicine|0": { "alias": " - professional_medicine", "acc,none": 0.4889705882352941, "acc_stderr,none": 0.030365446477275668 }, "harness|mmlu_virology|0": { "alias": " - virology", "acc,none": 0.4457831325301205, "acc_stderr,none": 0.03869543323472101 }, "harness|mmlu_social_sciences|0": { "alias": " - social_sciences", "acc,none": 0.6074098147546312, "acc_stderr,none": 0.008568371486875524 }, "harness|mmlu_econometrics|0": { "alias": " - econometrics", "acc,none": 0.2631578947368421, "acc_stderr,none": 0.041424397194893624 }, "harness|mmlu_high_school_geography|0": { "alias": " - high_school_geography", "acc,none": 0.6414141414141414, "acc_stderr,none": 0.0341690364039152 }, "harness|mmlu_high_school_government_and_politics|0": { "alias": " - high_school_government_and_politics", "acc,none": 0.7357512953367875, "acc_stderr,none": 0.03182155050916646 }, "harness|mmlu_high_school_macroeconomics|0": { "alias": " - high_school_macroeconomics", "acc,none": 0.48205128205128206, "acc_stderr,none": 0.025334667080954932 }, "harness|mmlu_high_school_microeconomics|0": { "alias": " - high_school_microeconomics", "acc,none": 0.5084033613445378, "acc_stderr,none": 0.0324739027656967 }, "harness|mmlu_high_school_psychology|0": { "alias": " - high_school_psychology", "acc,none": 0.7064220183486238, "acc_stderr,none": 0.01952515112263966 }, "harness|mmlu_human_sexuality|0": { "alias": " - human_sexuality", "acc,none": 0.6412213740458015, "acc_stderr,none": 0.04206739313864908 }, "harness|mmlu_professional_psychology|0": { "alias": " - professional_psychology", "acc,none": 0.5441176470588235, "acc_stderr,none": 0.02014893942041575 }, "harness|mmlu_public_relations|0": { "alias": " - public_relations", "acc,none": 0.6090909090909091, "acc_stderr,none": 0.04673752333670238 }, "harness|mmlu_security_studies|0": { "alias": " - security_studies", "acc,none": 0.6653061224489796, "acc_stderr,none": 0.030209235226242307 }, "harness|mmlu_sociology|0": { "alias": " - sociology", "acc,none": 0.746268656716418, "acc_stderr,none": 0.03076944496729602 }, "harness|mmlu_us_foreign_policy|0": { "alias": " - us_foreign_policy", "acc,none": 0.79, "acc_stderr,none": 0.040936018074033256 }, "harness|mmlu_stem|0": { 
"alias": " - stem", "acc,none": 0.4224548049476689, "acc_stderr,none": 0.00855257101480014 }, "harness|mmlu_abstract_algebra|0": { "alias": " - abstract_algebra", "acc,none": 0.3, "acc_stderr,none": 0.046056618647183814 }, "harness|mmlu_anatomy|0": { "alias": " - anatomy", "acc,none": 0.5259259259259259, "acc_stderr,none": 0.04313531696750575 }, "harness|mmlu_astronomy|0": { "alias": " - astronomy", "acc,none": 0.5657894736842105, "acc_stderr,none": 0.04033565667848319 }, "harness|mmlu_college_biology|0": { "alias": " - college_biology", "acc,none": 0.5486111111111112, "acc_stderr,none": 0.04161402398403279 }, "harness|mmlu_college_chemistry|0": { "alias": " - college_chemistry", "acc,none": 0.35, "acc_stderr,none": 0.0479372485441102 }, "harness|mmlu_college_computer_science|0": { "alias": " - college_computer_science", "acc,none": 0.48, "acc_stderr,none": 0.050211673156867795 }, "harness|mmlu_college_mathematics|0": { "alias": " - college_mathematics", "acc,none": 0.3, "acc_stderr,none": 0.046056618647183814 }, "harness|mmlu_college_physics|0": { "alias": " - college_physics", "acc,none": 0.30392156862745096, "acc_stderr,none": 0.045766654032077636 }, "harness|mmlu_computer_security|0": { "alias": " - computer_security", "acc,none": 0.66, "acc_stderr,none": 0.04760952285695237 }, "harness|mmlu_conceptual_physics|0": { "alias": " - conceptual_physics", "acc,none": 0.39574468085106385, "acc_stderr,none": 0.031967586978353627 }, "harness|mmlu_electrical_engineering|0": { "alias": " - electrical_engineering", "acc,none": 0.47586206896551725, "acc_stderr,none": 0.041618085035015295 }, "harness|mmlu_elementary_mathematics|0": { "alias": " - elementary_mathematics", "acc,none": 0.328042328042328, "acc_stderr,none": 0.024180497164376896 }, "harness|mmlu_high_school_biology|0": { "alias": " - high_school_biology", "acc,none": 0.6193548387096774, "acc_stderr,none": 0.027621717832907036 }, "harness|mmlu_high_school_chemistry|0": { "alias": " - high_school_chemistry", "acc,none": 0.4187192118226601, "acc_stderr,none": 0.03471192860518468 }, "harness|mmlu_high_school_computer_science|0": { "alias": " - high_school_computer_science", "acc,none": 0.59, "acc_stderr,none": 0.04943110704237102 }, "harness|mmlu_high_school_mathematics|0": { "alias": " - high_school_mathematics", "acc,none": 0.27037037037037037, "acc_stderr,none": 0.02708037281514567 }, "harness|mmlu_high_school_physics|0": { "alias": " - high_school_physics", "acc,none": 0.31788079470198677, "acc_stderr,none": 0.038020397601079024 }, "harness|mmlu_high_school_statistics|0": { "alias": " - high_school_statistics", "acc,none": 0.35185185185185186, "acc_stderr,none": 0.032568505702936464 }, "harness|mmlu_machine_learning|0": { "alias": " - machine_learning", "acc,none": 0.33035714285714285, "acc_stderr,none": 0.044642857142857116 }, "harness|arc:easy|0": { "acc,none": 0.7718855218855218, "acc_stderr,none": 0.008610355160815555, "acc_norm,none": 0.7441077441077442, "acc_norm_stderr,none": 0.008953950243013995, "alias": "arc_easy" }, "harness|hellaswag|0": { "acc,none": 0.6022704640509858, "acc_stderr,none": 0.004884287515461496, "acc_norm,none": 0.7938657637920733, "acc_norm_stderr,none": 0.004037012714039292, "alias": "hellaswag" }, "harness|truthfulqa:mc2|0": { "acc,none": 0.43023620991960937, "acc_stderr,none": 0.015553216656357404, "alias": "truthfulqa_mc2" }, "harness|openbookqa|0": { "acc,none": 0.35, "acc_stderr,none": 0.021352091786223104, "acc_norm,none": 0.448, "acc_norm_stderr,none": 0.02226169729227014, "alias": "openbookqa" }, 
"harness|arc:challenge|0": { "acc,none": 0.4726962457337884, "acc_stderr,none": 0.014589589101986, "acc_norm,none": 0.49573378839590443, "acc_norm_stderr,none": 0.014610858923956952, "alias": "arc_challenge" }, "harness|truthfulqa:mc1|0": { "acc,none": 0.2802937576499388, "acc_stderr,none": 0.015723139524608774, "alias": "truthfulqa_mc1" } }, "task_info": { "model": "leliuga/Llama-2-13b-chat-hf-bnb-4bit", "revision": "main", "private": false, "params": 7.2, "architectures": "LlamaForCausalLM", "quant_type": "bitsandbytes", "precision": "4bit", "model_params": 13.08, "model_size": 7.2, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending", "submitted_time": "2024-05-10T07:47:50Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX" }, "quantization_config": { "bnb_4bit_compute_dtype": "float16", "bnb_4bit_quant_type": "nf4", "bnb_4bit_use_double_quant": true, "llm_int8_enable_fp32_cpu_offload": false, "llm_int8_has_fp16_weight": false, "llm_int8_skip_modules": null, "llm_int8_threshold": 6.0, "load_in_4bit": true, "load_in_8bit": false }, "versions": { "harness|winogrande|0": 1.0, "harness|piqa|0": 1.0, "harness|lambada:openai|0": 1.0, "harness|boolq|0": 2.0, "harness|mmlu|0": null, "harness|mmlu_humanities|0": null, "harness|mmlu_formal_logic|0": 0.0, "harness|mmlu_high_school_european_history|0": 0.0, "harness|mmlu_high_school_us_history|0": 0.0, "harness|mmlu_high_school_world_history|0": 0.0, "harness|mmlu_international_law|0": 0.0, "harness|mmlu_jurisprudence|0": 0.0, "harness|mmlu_logical_fallacies|0": 0.0, "harness|mmlu_moral_disputes|0": 0.0, "harness|mmlu_moral_scenarios|0": 0.0, "harness|mmlu_philosophy|0": 0.0, "harness|mmlu_prehistory|0": 0.0, "harness|mmlu_professional_law|0": 0.0, "harness|mmlu_world_religions|0": 0.0, "harness|mmlu_other|0": null, "harness|mmlu_business_ethics|0": 0.0, "harness|mmlu_clinical_knowledge|0": 0.0, "harness|mmlu_college_medicine|0": 0.0, "harness|mmlu_global_facts|0": 0.0, "harness|mmlu_human_aging|0": 0.0, "harness|mmlu_management|0": 0.0, "harness|mmlu_marketing|0": 0.0, "harness|mmlu_medical_genetics|0": 0.0, "harness|mmlu_miscellaneous|0": 0.0, "harness|mmlu_nutrition|0": 0.0, "harness|mmlu_professional_accounting|0": 0.0, "harness|mmlu_professional_medicine|0": 0.0, "harness|mmlu_virology|0": 0.0, "harness|mmlu_social_sciences|0": null, "harness|mmlu_econometrics|0": 0.0, "harness|mmlu_high_school_geography|0": 0.0, "harness|mmlu_high_school_government_and_politics|0": 0.0, "harness|mmlu_high_school_macroeconomics|0": 0.0, "harness|mmlu_high_school_microeconomics|0": 0.0, "harness|mmlu_high_school_psychology|0": 0.0, "harness|mmlu_human_sexuality|0": 0.0, "harness|mmlu_professional_psychology|0": 0.0, "harness|mmlu_public_relations|0": 0.0, "harness|mmlu_security_studies|0": 0.0, "harness|mmlu_sociology|0": 0.0, "harness|mmlu_us_foreign_policy|0": 0.0, "harness|mmlu_stem|0": null, "harness|mmlu_abstract_algebra|0": 0.0, "harness|mmlu_anatomy|0": 0.0, "harness|mmlu_astronomy|0": 0.0, "harness|mmlu_college_biology|0": 0.0, "harness|mmlu_college_chemistry|0": 0.0, "harness|mmlu_college_computer_science|0": 0.0, "harness|mmlu_college_mathematics|0": 0.0, "harness|mmlu_college_physics|0": 0.0, "harness|mmlu_computer_security|0": 0.0, "harness|mmlu_conceptual_physics|0": 0.0, "harness|mmlu_electrical_engineering|0": 0.0, "harness|mmlu_elementary_mathematics|0": 0.0, "harness|mmlu_high_school_biology|0": 0.0, "harness|mmlu_high_school_chemistry|0": 
0.0, "harness|mmlu_high_school_computer_science|0": 0.0, "harness|mmlu_high_school_mathematics|0": 0.0, "harness|mmlu_high_school_physics|0": 0.0, "harness|mmlu_high_school_statistics|0": 0.0, "harness|mmlu_machine_learning|0": 0.0, "harness|arc:easy|0": 1.0, "harness|hellaswag|0": 1.0, "harness|truthfulqa:mc2|0": 2.0, "harness|openbookqa|0": 1.0, "harness|arc:challenge|0": 1.0, "harness|truthfulqa:mc1|0": 2.0 }, "n-shot": { "arc_challenge": 0, "arc_easy": 0, "boolq": 0, "hellaswag": 0, "lambada_openai": 0, "mmlu": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_humanities": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_other": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_social_sciences": 0, "mmlu_sociology": 0, "mmlu_stem": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0, "openbookqa": 0, "piqa": 0, "truthfulqa_mc1": 0, "truthfulqa_mc2": 0, "winogrande": 0 }, "date": 1715567404.117856, "config": { "model": "hf", "model_args": "pretrained=leliuga/Llama-2-13b-chat-hf-bnb-4bit,trust_remote_code=True,dtype=float16,_commit_hash=main", "batch_size": 1, "batch_sizes": [], "device": "cuda", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null } }